[ 499.008710] env[61962]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=61962) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 499.009186] env[61962]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=61962) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 499.009186] env[61962]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=61962) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 499.009490] env[61962]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 499.103324] env[61962]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61962) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 499.114317] env[61962]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=61962) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 499.257415] env[61962]: INFO nova.virt.driver [None req-7d292bec-46ac-4408-8c73-51ecef647ce7 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 499.333734] env[61962]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 499.333920] env[61962]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 499.334030] env[61962]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61962) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 502.511395] env[61962]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-7271a29c-2004-46f9-ba1b-5d5ab0175315 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 502.527606] env[61962]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61962) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 502.527807] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-45d0edcd-67c4-43be-bcb8-5526752af0b3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 502.557020] env[61962]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 811c6.
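The excerpt above shows nova-compute initializing os-vif and then establishing its vCenter session through oslo.vmware. A minimal sketch of those two public calls follows; the host, credentials, retry count, and poll interval are placeholders, not values taken from this log:

```python
# Sketch only: reproduce the os-vif initialization and oslo.vmware login
# visible in the log above. Hostname and credentials are placeholders.
import os_vif
from oslo_vmware import api

# os_vif.initialize() scans the installed VIF plugins and emits the
# "Loaded VIF plugin class ... with name ..." DEBUG lines seen above.
os_vif.initialize()

# VMwareAPISession builds the suds client ("Creating suds client with
# soap_url=...") and invokes SessionManager.Login, as in the trace above.
session = api.VMwareAPISession(
    'vc1.example.com',               # placeholder vCenter host
    'administrator@vsphere.local',   # placeholder username
    'secret',                        # placeholder password
    api_retry_count=10,              # example value
    task_poll_interval=0.5,          # example value
)
```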
[ 502.557209] env[61962]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.223s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 502.557783] env[61962]: INFO nova.virt.vmwareapi.driver [None req-7d292bec-46ac-4408-8c73-51ecef647ce7 None None] VMware vCenter version: 7.0.3
[ 502.561288] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc81863b-624b-4c1c-8a49-a235635886da {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 502.579607] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df68f057-31f0-4632-918d-f67beac62c0e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 502.586308] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c619cee6-11e7-4dbe-97ff-96edd00d3a66 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 502.593196] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7229ebf4-9de7-4f8b-8e30-77cba730a8db {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 502.606670] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75eef3be-1d3e-48ec-9790-f2eef07268d9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 502.612762] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8081f2-a274-4c5c-b55e-ac669ae7e4a9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 502.643452] env[61962]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-f333ed0d-6520-4f9b-b368-b23efbec6699 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 502.649169] env[61962]: DEBUG nova.virt.vmwareapi.driver [None req-7d292bec-46ac-4408-8c73-51ecef647ce7 None None] Extension org.openstack.compute already exists. {{(pid=61962) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 502.651953] env[61962]: INFO nova.compute.provider_config [None req-7d292bec-46ac-4408-8c73-51ecef647ce7 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
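The RetrievePropertiesEx and FindExtension invocations above are issued through the session's invoke_api() helper. A hedged sketch, reusing the `session` object from the previous sketch; the moref value 'domain-c8' is a made-up example, while org.openstack.compute is the extension key the log itself shows:

```python
# Sketch only: `session` is the VMwareAPISession created earlier.
from oslo_vmware import vim_util

# Reading one property of a managed object issues a
# PropertyCollector.RetrievePropertiesEx call like those logged above.
cluster = vim_util.get_moref('domain-c8', 'ClusterComputeResource')  # example moref
name = session.invoke_api(vim_util, 'get_object_property',
                          session.vim, cluster, 'name')

# The extension check corresponds to the ExtensionManager.FindExtension
# line; nova looks for its own org.openstack.compute extension.
extension = session.invoke_api(session.vim, 'FindExtension',
                               session.vim.service_content.extensionManager,
                               extensionKey='org.openstack.compute')
```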
[ 502.670528] env[61962]: DEBUG nova.context [None req-7d292bec-46ac-4408-8c73-51ecef647ce7 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),d06a6285-50ad-48de-9cc0-0c34a03e49c4(cell1) {{(pid=61962) load_cells /opt/stack/nova/nova/context.py:464}}
[ 502.672589] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 502.672840] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 502.673521] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 502.673943] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Acquiring lock "d06a6285-50ad-48de-9cc0-0c34a03e49c4" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 502.674140] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Lock "d06a6285-50ad-48de-9cc0-0c34a03e49c4" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 502.675146] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Lock "d06a6285-50ad-48de-9cc0-0c34a03e49c4" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 502.696576] env[61962]: INFO dbcounter [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Registered counter for database nova_cell0
[ 502.704699] env[61962]: INFO dbcounter [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Registered counter for database nova_cell1
[ 502.708537] env[61962]: DEBUG oslo_db.sqlalchemy.engines [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61962) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 502.708881] env[61962]: DEBUG oslo_db.sqlalchemy.engines [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61962) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
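The Acquiring/acquired/released triplets above come from oslo.concurrency's synchronized decorator wrapping the per-cell connection-cache setup. A minimal sketch of that pattern, with an illustrative lock name and body:

```python
# Sketch only: the decorator's inner wrapper emits the "Acquiring lock ...
# by ...", "Lock ... acquired ... waited Ns" and "Lock ... released ...
# held Ns" DEBUG lines seen above.
from oslo_concurrency import lockutils

@lockutils.synchronized('00000000-0000-0000-0000-000000000000')
def get_or_set_cached_cell_and_set_connections():
    # Body elided; in nova this populates a per-cell DB/MQ connection
    # cache while holding the cell-UUID lock.
    pass

get_or_set_cached_cell_and_set_connections()
```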
[ 502.713651] env[61962]: DEBUG dbcounter [-] [61962] Writer thread running {{(pid=61962) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 502.714381] env[61962]: DEBUG dbcounter [-] [61962] Writer thread running {{(pid=61962) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 502.716971] env[61962]: ERROR nova.db.main.api [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 502.716971] env[61962]: result = function(*args, **kwargs)
[ 502.716971] env[61962]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 502.716971] env[61962]: return func(*args, **kwargs)
[ 502.716971] env[61962]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 502.716971] env[61962]: result = fn(*args, **kwargs)
[ 502.716971] env[61962]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 502.716971] env[61962]: return f(*args, **kwargs)
[ 502.716971] env[61962]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 502.716971] env[61962]: return db.service_get_minimum_version(context, binaries)
[ 502.716971] env[61962]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 502.716971] env[61962]: _check_db_access()
[ 502.716971] env[61962]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 502.716971] env[61962]: stacktrace = ''.join(traceback.format_stack())
[ 502.716971] env[61962]:
[ 502.718035] env[61962]: ERROR nova.db.main.api [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 502.718035] env[61962]: result = function(*args, **kwargs)
[ 502.718035] env[61962]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 502.718035] env[61962]: return func(*args, **kwargs)
[ 502.718035] env[61962]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 502.718035] env[61962]: result = fn(*args, **kwargs)
[ 502.718035] env[61962]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 502.718035] env[61962]: return f(*args, **kwargs)
[ 502.718035] env[61962]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 502.718035] env[61962]: return db.service_get_minimum_version(context, binaries)
[ 502.718035] env[61962]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 502.718035] env[61962]: _check_db_access()
[ 502.718035] env[61962]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 502.718035] env[61962]: stacktrace = ''.join(traceback.format_stack())
[ 502.718035] env[61962]:
[ 502.718443] env[61962]: WARNING nova.objects.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Failed to get minimum service version for cell d06a6285-50ad-48de-9cc0-0c34a03e49c4
[ 502.718559] env[61962]: WARNING nova.objects.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
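The two ERROR blocks above are nova's guard against direct main-database access from nova-compute: the wrapped DB API logs a formatted stack instead of querying, and the caller falls back, producing the "Failed to get minimum service version" warnings. A hypothetical minimal reconstruction of that guard follows; the names mirror the traceback, but the bodies are illustrative, not nova's actual code:

```python
# Hypothetical sketch of the no-DB-access guard seen in the traceback above.
import functools
import logging
import traceback

LOG = logging.getLogger(__name__)
DISABLE_DB_ACCESS = True  # assumption: set by nova-compute at startup


def _check_db_access():
    # Mirrors nova/db/main/api.py:_check_db_access in the traceback:
    # format the current stack and log it as an error.
    if DISABLE_DB_ACCESS:
        stacktrace = ''.join(traceback.format_stack())
        LOG.error('No DB access allowed in nova-compute: %s', stacktrace)


def db_api(f):
    # Mirrors the wrapper at nova/db/main/api.py line 238 in the traceback:
    # every decorated DB call runs the guard before touching the database.
    @functools.wraps(f)
    def wrapper(context, *args, **kwargs):
        _check_db_access()
        return f(context, *args, **kwargs)
    return wrapper
```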
[ 502.719053] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Acquiring lock "singleton_lock" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 502.719281] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Acquired lock "singleton_lock" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 502.719550] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Releasing lock "singleton_lock" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 502.719883] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Full set of CONF: {{(pid=61962) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 502.720044] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ******************************************************************************** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 502.720179] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] Configuration options gathered from: {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 502.720315] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 502.720505] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 502.720634] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ================================================================================ {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
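Everything from here to the end of the excerpt is oslo.config's option dump. A short sketch of the two calls that produce it, using the config files listed in the banner above:

```python
# Sketch only: parse the same --config-file arguments, then dump every
# registered option at DEBUG level, which yields the "option = value"
# lines that follow, group by group.
import logging
from oslo_config import cfg

CONF = cfg.CONF
CONF(['--config-file', '/etc/nova/nova.conf',
      '--config-file', '/etc/nova/nova-cpu-common.conf',
      '--config-file', '/etc/nova/nova-cpu-1.conf'],
     project='nova')

LOG = logging.getLogger(__name__)
CONF.log_opt_values(LOG, logging.DEBUG)
```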
[ 502.720850] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] allow_resize_to_same_host = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.721033] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] arq_binding_timeout = 300 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.721169] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] backdoor_port = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.721299] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] backdoor_socket = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.721467] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] block_device_allocate_retries = 60 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.721630] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] block_device_allocate_retries_interval = 3 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.721800] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cert = self.pem {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.722045] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.722142] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute_monitors = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.722311] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] config_dir = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.722483] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] config_drive_format = iso9660 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.722679] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.722803] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] config_source = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.722981] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] console_host = devstack {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.723169] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] control_exchange = nova {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.723330] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cpu_allocation_ratio = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.723493] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] daemon = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.723661] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] debug = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.723819] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] default_access_ip_network_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.723985] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] default_availability_zone = nova {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.724161] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] default_ephemeral_format = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.724325] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] default_green_pool_size = 1000 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.724565] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.724733] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] default_schedule_zone = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.724893] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] disk_allocation_ratio = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.725080] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] enable_new_services = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.725276] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] enabled_apis = ['osapi_compute'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.725444] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] enabled_ssl_apis = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.725606] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] flat_injected = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.725763] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] force_config_drive = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.725923] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] force_raw_images = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.726101] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] graceful_shutdown_timeout = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.726265] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] heal_instance_info_cache_interval = 60 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.726483] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] host = cpu-1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.726659] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.726819] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] initial_disk_allocation_ratio = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.726977] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] initial_ram_allocation_ratio = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.727206] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.727371] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] instance_build_timeout = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.727529] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] instance_delete_interval = 300 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.727692] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] instance_format = [instance: %(uuid)s] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.727858] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] instance_name_template = instance-%08x {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.728031] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] instance_usage_audit = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.728205] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] instance_usage_audit_period = month {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.728371] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.728536] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] instances_path = /opt/stack/data/nova/instances {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.728701] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] internal_service_availability_zone = internal {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.728859] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] key = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.729028] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] live_migration_retry_count = 30 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.729195] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] log_config_append = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.729358] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.729515] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] log_dir = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.729672] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] log_file = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.729801] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] log_options = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.729962] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] log_rotate_interval = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.730143] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] log_rotate_interval_type = days {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.730308] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] log_rotation_type = none {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.730440] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.730585] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.730763] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.730930] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.731071] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.731237] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] long_rpc_timeout = 1800 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.731396] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] max_concurrent_builds = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.731554] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] max_concurrent_live_migrations = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.731711] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] max_concurrent_snapshots = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.731868] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] max_local_block_devices = 3 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.732035] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] max_logfile_count = 30 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.732198] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] max_logfile_size_mb = 200 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.732355] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] maximum_instance_delete_attempts = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.732522] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] metadata_listen = 0.0.0.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.732685] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] metadata_listen_port = 8775 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.732929] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] metadata_workers = 2 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.733055] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] migrate_max_retries = -1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.733229] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] mkisofs_cmd = genisoimage {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.733433] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] my_block_storage_ip = 10.180.1.21 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.733566] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] my_ip = 10.180.1.21 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.733796] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] network_allocate_retries = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.734008] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.734190] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] osapi_compute_listen = 0.0.0.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.734356] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] osapi_compute_listen_port = 8774 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.734523] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] osapi_compute_unique_server_name_scope = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.734692] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] osapi_compute_workers = 2 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.734871] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] password_length = 12 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.735077] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] periodic_enable = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.735249] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] periodic_fuzzy_delay = 60 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.735418] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] pointer_model = usbtablet {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.735587] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] preallocate_images = none {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.735745] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] publish_errors = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.735878] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] pybasedir = /opt/stack/nova {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.736042] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ram_allocation_ratio = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.736206] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] rate_limit_burst = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.736370] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] rate_limit_except_level = CRITICAL {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.736528] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] rate_limit_interval = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.736683] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] reboot_timeout = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.736838] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] reclaim_instance_interval = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.736994] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] record = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.737173] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] reimage_timeout_per_gb = 60 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.737337] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] report_interval = 120 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.737495] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] rescue_timeout = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.737650] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] reserved_host_cpus = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.737808] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] reserved_host_disk_mb = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.737964] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] reserved_host_memory_mb = 512 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.738135] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] reserved_huge_pages = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.738295] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] resize_confirm_window = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.738455] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] resize_fs_using_block_device = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.738612] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] resume_guests_state_on_host_boot = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.738780] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.738939] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] rpc_response_timeout = 60 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.739108] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] run_external_periodic_tasks = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.739277] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] running_deleted_instance_action = reap {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.739435] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] running_deleted_instance_poll_interval = 1800 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.739592] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] running_deleted_instance_timeout = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.739747] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] scheduler_instance_sync_interval = 120 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.739915] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] service_down_time = 720 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.740093] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] servicegroup_driver = db {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.740256] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] shelved_offload_time = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.740414] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] shelved_poll_interval = 3600 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.740578] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] shutdown_timeout = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.740758] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] source_is_ipv6 = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.740932] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ssl_only = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.741197] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.741367] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] sync_power_state_interval = 600 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.741529] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] sync_power_state_pool_size = 1000 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.741697] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] syslog_log_facility = LOG_USER {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.741855] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] tempdir = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.742019] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] timeout_nbd = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.742188] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] transport_url = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.742351] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] update_resources_interval = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.742510] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] use_cow_images = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.742665] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] use_eventlog = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.742857] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] use_journal = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.743058] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] use_json = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.743194] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] use_rootwrap_daemon = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.743350] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] use_stderr = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.743505] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] use_syslog = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.743658] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vcpu_pin_set = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.743825] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plugging_is_fatal = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.743989] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plugging_timeout = 300 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.744165] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] virt_mkfs = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.744324] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] volume_usage_poll_interval = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.744482] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] watch_log_file = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.744648] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] web = /usr/share/spice-html5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 502.744828] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_concurrency.disable_process_locking = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.745147] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.745339] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.745505] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.745686] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.745858] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.746032] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.746218] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.auth_strategy = keystone {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.746383] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.compute_link_prefix = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.746557] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.746728] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.dhcp_domain = novalocal {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.746903] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.enable_instance_password = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.747077] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.glance_link_prefix = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.747243] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.747409] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.747570] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.instance_list_per_project_cells = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.747728] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.list_records_by_skipping_down_cells = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.747887] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.local_metadata_per_cell = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.748061] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.max_limit = 1000 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.748230] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.metadata_cache_expiration = 15 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.748402] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.neutron_default_tenant_id = default {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.748569] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.use_neutron_default_nets = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.748738] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.748906] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.749122] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.749304] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.749476] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.vendordata_dynamic_targets = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.749643] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.vendordata_jsonfile_path = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.749824] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.750028] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.backend = dogpile.cache.memcached {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
None None] cache.backend = dogpile.cache.memcached {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.750201] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.backend_argument = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.750376] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.config_prefix = cache.oslo {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.750547] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.dead_timeout = 60.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.750717] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.debug_cache_backend = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.750899] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.enable_retry_client = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.751088] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.enable_socket_keepalive = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.751263] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.enabled = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.751429] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.enforce_fips_mode = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.751594] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.expiration_time = 600 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.751756] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.hashclient_retry_attempts = 2 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.751922] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.hashclient_retry_delay = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.752097] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.memcache_dead_retry = 300 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.752260] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.memcache_password = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.752420] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61962) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.752581] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.752762] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.memcache_pool_maxsize = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.752941] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.753167] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.memcache_sasl_enabled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.753300] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.753467] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.memcache_socket_timeout = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.753626] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.memcache_username = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.753790] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.proxies = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.753948] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.redis_password = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.754130] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.redis_sentinel_service_name = mymaster {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.754306] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.754477] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.redis_server = localhost:6379 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.754642] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.redis_socket_timeout = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.754801] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.redis_username = None {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.754966] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.retry_attempts = 2 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.755141] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.retry_delay = 0.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.755304] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.socket_keepalive_count = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.755465] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.socket_keepalive_idle = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.755624] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.socket_keepalive_interval = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.755784] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.tls_allowed_ciphers = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.755942] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.tls_cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.756113] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.tls_certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.756278] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.tls_enabled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.756441] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cache.tls_keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.756612] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.auth_section = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.756788] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.auth_type = password {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.756952] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.757142] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.catalog_info = volumev3::publicURL {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
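Every "group.option = value" record above is produced by oslo.config's ConfigOpts.log_opt_values(), which each Nova service calls once at startup to dump its effective configuration at DEBUG level; that is what the "log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620" tag on every record points at. The dotted names map to INI sections of nova.conf (cache.memcache_servers is memcache_servers under [cache]), and options registered with secret=True, such as cache.memcache_password or database.connection, are masked as ****. Below is a minimal, self-contained sketch of that mechanism; the option names and defaults merely mirror a few values from this dump for illustration and are not Nova's actual option definitions.

    import logging

    from oslo_config import cfg

    LOG = logging.getLogger(__name__)

    # Illustrative options mirroring values seen in the dump above.
    cache_opts = [
        cfg.StrOpt('backend', default='dogpile.cache.memcached'),
        cfg.IntOpt('expiration_time', default=600),
        cfg.ListOpt('memcache_servers', default=['localhost:11211']),
        # secret=True is what makes log_opt_values() print "****"
        cfg.StrOpt('memcache_password', secret=True),
    ]

    CONF = cfg.ConfigOpts()
    CONF.register_opts(cache_opts, group='cache')

    logging.basicConfig(level=logging.DEBUG)
    CONF([], project='nova')  # parse CLI/config files; none are required here
    # Emits one "cache.<option> = <value>" line per registered option.
    CONF.log_opt_values(LOG, logging.DEBUG)

Run directly, this prints lines like "cache.backend = dogpile.cache.memcached" and "cache.memcache_password = ****". The {{(pid=61962) log_opt_values ...cfg.py:2620}} suffix seen in this log is not produced by oslo.config itself; it comes from the DEBUG-level format suffix that oslo.log layers onto each record.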
[ 502.757306] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.757472] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.757636] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.cross_az_attach = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.757801] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.debug = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.757960] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.endpoint_template = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.758138] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.http_retries = 3 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.758305] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.758466] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.758640] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.os_region_name = RegionOne {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.758808] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.split_loggers = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.758969] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cinder.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.759154] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.759318] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.cpu_dedicated_set = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.759478] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.cpu_shared_set = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.759643] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.image_type_exclude_list = [] {{(pid=61962)
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.759807] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.759971] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.max_concurrent_disk_ops = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.760148] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.max_disk_devices_to_attach = -1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.760317] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.760489] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.760654] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.resource_provider_association_refresh = 300 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.760873] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.761069] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.shutdown_retry_interval = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.761256] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.761461] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] conductor.workers = 2 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.761787] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] console.allowed_origins = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.761787] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] console.ssl_ciphers = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.761943] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] console.ssl_minimum_version = default {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.762127] env[61962]: DEBUG oslo_service.service [None 
req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] consoleauth.enforce_session_timeout = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.762302] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] consoleauth.token_ttl = 600 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.762535] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.762628] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.762814] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.762982] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.connect_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.763158] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.connect_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.763321] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.endpoint_override = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.763483] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.763639] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.763795] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.max_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.763952] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.min_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.764128] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.region_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.764291] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.retriable_status_codes = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.764453] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.service_name = None {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.764625] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.service_type = accelerator {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.764792] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.split_loggers = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.764954] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.status_code_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.765136] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.status_code_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.765301] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.765482] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.765642] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] cyborg.version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.765825] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.backend = sqlalchemy {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.765994] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.connection = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.766174] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.connection_debug = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.766350] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.connection_parameters = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.766514] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.connection_recycle_time = 3600 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.766676] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.connection_trace = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.766838] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.db_inc_retry_interval = True {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.767048] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.db_max_retries = 20 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.767221] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.db_max_retry_interval = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.767384] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.db_retry_interval = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.767776] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.max_overflow = 50 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.767776] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.max_pool_size = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.767861] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.max_retries = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.768018] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770284] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.mysql_wsrep_sync_wait = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770284] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.pool_timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770284] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.retry_interval = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770284] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.slave_connection = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770284] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.sqlite_synchronous = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770284] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] database.use_db_reconnect = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770543] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.backend = sqlalchemy {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
502.770543] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.connection = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770543] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.connection_debug = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770543] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.connection_parameters = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770543] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.connection_recycle_time = 3600 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770543] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.connection_trace = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770739] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.db_inc_retry_interval = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770739] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.db_max_retries = 20 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770739] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.db_max_retry_interval = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770739] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.db_retry_interval = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770739] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.max_overflow = 50 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.770908] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.max_pool_size = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.771059] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.max_retries = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.771235] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.771398] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.771555] 
env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.pool_timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.771718] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.retry_interval = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.771880] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.slave_connection = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.772055] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] api_database.sqlite_synchronous = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.772234] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] devices.enabled_mdev_types = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.772412] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.772594] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ephemeral_storage_encryption.default_format = luks {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.772790] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ephemeral_storage_encryption.enabled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.773061] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.773325] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.api_servers = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.773574] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.773817] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.774073] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.774316] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.connect_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.774556] env[61962]: DEBUG 
oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.connect_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.774803] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.debug = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.775069] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.default_trusted_certificate_ids = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.775261] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.enable_certificate_validation = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.775433] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.enable_rbd_download = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.775598] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.endpoint_override = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.775771] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.775941] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.776116] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.max_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.776279] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.min_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.776446] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.num_retries = 3 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.776618] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.rbd_ceph_conf = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.776816] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.rbd_connect_timeout = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.777018] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.rbd_pool = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.777197] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.rbd_user = {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.777363] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.region_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.777527] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.retriable_status_codes = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.777691] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.service_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.777866] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.service_type = image {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.778041] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.split_loggers = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.778208] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.status_code_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.778370] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.status_code_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.778532] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.778715] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.778886] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.verify_glance_signatures = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.779054] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] glance.version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.779228] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] guestfs.debug = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.779406] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] mks.enabled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.779772] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.779966] 
env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] image_cache.manager_interval = 2400 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.780152] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] image_cache.precache_concurrency = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.780327] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] image_cache.remove_unused_base_images = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.780499] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.780691] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.780906] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] image_cache.subdirectory_name = _base {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.781105] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.api_max_retries = 60 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.781277] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.api_retry_interval = 2 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.781438] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.auth_section = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.781603] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.auth_type = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.781766] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.781933] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.782110] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.782281] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.conductor_group = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.782441] env[61962]: DEBUG 
oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.connect_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.782604] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.connect_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.782853] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.endpoint_override = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.782964] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.783145] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.783308] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.max_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.783469] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.min_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.783638] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.peer_list = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.783800] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.region_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.783961] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.retriable_status_codes = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.784141] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.serial_console_state_timeout = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.784305] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.service_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.784478] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.service_type = baremetal {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.784641] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.shard = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.784808] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.split_loggers = False {{(pid=61962) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.784970] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.status_code_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.785146] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.status_code_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.785310] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.785494] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.785658] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ironic.version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.785845] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.786034] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] key_manager.fixed_key = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.786225] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.786392] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.barbican_api_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.786554] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.barbican_endpoint = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.786729] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.barbican_endpoint_type = public {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.786892] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.barbican_region_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.787061] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.787228] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.certfile = None {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.787396] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.787561] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.787723] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.787892] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.number_of_retries = 60 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.788065] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.retry_delay = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.788234] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.send_service_user_token = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.788399] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.split_loggers = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.788558] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.788722] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.verify_ssl = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.788888] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican.verify_ssl_path = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.789067] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican_service_user.auth_section = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.789236] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican_service_user.auth_type = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.789398] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican_service_user.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.789558] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican_service_user.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
[ 502.789726] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican_service_user.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.789892] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican_service_user.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.790063] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican_service_user.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.790232] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican_service_user.split_loggers = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.790392] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] barbican_service_user.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.790562] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.approle_role_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.790773] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.approle_secret_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.790955] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.791136] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.791308] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.791474] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.791634] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.791810] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.kv_mountpoint = secret {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.791974] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.kv_path = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.792156] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.kv_version = 2 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.792321] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.namespace = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.792482] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.root_token_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.792649] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.split_loggers = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.792892] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.ssl_ca_crt_file = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.793081] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.793185] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.use_ssl = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.793362] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.793536] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.auth_section = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.793772] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.auth_type = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.793888] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.794064] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.794235] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.794398] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.connect_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.794558] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.connect_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.794717] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.endpoint_override = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.794884] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.795055] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.795218] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.max_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.795378] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.min_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.795537] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.region_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.795700] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.retriable_status_codes = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.795862] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.service_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.796045] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.service_type = identity {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.796217] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.split_loggers = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.796377] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.status_code_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.796540] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.status_code_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.796700] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.796888] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.797061] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] keystone.version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
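Each entry in this dump is produced by oslo.config at service startup: every registered option is logged once as "group.option = value", and each line's source reference points at log_opt_values in oslo_config/cfg.py. A minimal, self-contained sketch of how such a dump is produced, assuming standard oslo.config usage (the 'keystone' group and the two options declared here are illustrative stand-ins, not Nova's actual declarations):

# Minimal sketch: producing a "group.option = value" dump with
# oslo.config. The group and option declarations are illustrative
# stand-ins, not Nova's real ones.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.ConfigOpts()
CONF.register_group(cfg.OptGroup('keystone'))
CONF.register_opts(
    [
        cfg.StrOpt('cafile', help='PEM CA bundle file.'),
        cfg.BoolOpt('insecure', default=False, help='Skip TLS verification.'),
    ],
    group='keystone',
)

CONF([])  # parse an empty argv so the defaults take effect
# Logs a short header, then one DEBUG line per registered option,
# e.g. "keystone.insecure = False".
CONF.log_opt_values(LOG, logging.DEBUG)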
[ 502.797267] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.connection_uri = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.797431] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.cpu_mode = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.797600] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.cpu_model_extra_flags = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.797771] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.cpu_models = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.797943] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.cpu_power_governor_high = performance {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.798125] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.cpu_power_governor_low = powersave {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.798292] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.cpu_power_management = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.798466] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.798633] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.device_detach_attempts = 8 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.798798] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.device_detach_timeout = 20 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.798964] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.disk_cachemodes = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.799138] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.disk_prefix = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.799305] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.enabled_perf_events = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.799469] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.file_backed_memory = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.799632] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.gid_maps = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.799791] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.hw_disk_discard = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.799955] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.hw_machine_type = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.800140] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.images_rbd_ceph_conf = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.800308] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.800476] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.800638] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.images_rbd_glance_store_name = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.800846] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.images_rbd_pool = rbd {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.801043] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.images_type = default {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.801211] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.images_volume_group = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.801380] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.inject_key = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.801546] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.inject_partition = -2 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.801708] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.inject_password = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.801874] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.iscsi_iface = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.802045] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.iser_use_multipath = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.802213] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_bandwidth = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.802375] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.802539] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_downtime = 500 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.802702] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.802885] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.803064] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_inbound_addr = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.803231] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.803393] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_permit_post_copy = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.803557] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_scheme = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.803744] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_timeout_action = abort {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.803926] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_tunnelled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.804109] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_uri = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.804279] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.live_migration_with_native_tls = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.804441] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.max_queues = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.804607] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.804845] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.805017] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.nfs_mount_options = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.805330] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.805506] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.805674] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.num_iser_scan_tries = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.805839] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.num_memory_encrypted_guests = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.806014] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.806189] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.num_pcie_ports = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.806357] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.num_volume_scan_tries = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.806525] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.pmem_namespaces = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.806686] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.quobyte_client_cfg = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.806973] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.807162] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.rbd_connect_timeout = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.807329] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.807495] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.807655] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.rbd_secret_uuid = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.807815] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.rbd_user = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.807977] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.808165] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.remote_filesystem_transport = ssh {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.808327] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.rescue_image_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.808487] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.rescue_kernel_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.808647] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.rescue_ramdisk_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.808817] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.808980] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.rx_queue_size = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.809163] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.smbfs_mount_options = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.809441] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.809618] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.snapshot_compression = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.809785] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.snapshot_image_format = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.810014] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.810198] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.sparse_logical_volumes = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.810360] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.swtpm_enabled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.810531] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.swtpm_group = tss {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.810711] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.swtpm_user = tss {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.810910] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.sysinfo_serial = unique {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.811088] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.tb_cache_size = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.811252] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.tx_queue_size = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.811421] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.uid_maps = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.811587] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.use_virtio_for_bridges = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.811761] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.virt_type = kvm {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.811931] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.volume_clear = zero {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.812110] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.volume_clear_size = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.812281] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.volume_use_multipath = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.812453] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.vzstorage_cache_path = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.812628] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.812827] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.vzstorage_mount_group = qemu {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.813016] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.vzstorage_mount_opts = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.813202] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.813483] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.813663] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.vzstorage_mount_user = stack {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.813862] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
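After registration, these options are also readable as attributes, CONF.<group>.<option>, which is how the values above are consumed at runtime. A short sketch of reading settings back, using two sample declarations that mirror entries from the [libvirt] block above (the declarations are local stand-ins; Nova's real ones live in the nova/conf/ package in its source tree):

# Sketch: reading registered options back as attributes. The two
# declarations mirror entries from the dump above but are local
# stand-ins, not imports of Nova's own option definitions.
from oslo_config import cfg

CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        cfg.StrOpt('virt_type', default='kvm'),
        cfg.IntOpt('device_detach_timeout', default=20),
    ],
    group='libvirt',
)
CONF([])

assert CONF.libvirt.virt_type == 'kvm'
assert CONF.libvirt.device_detach_timeout == 20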
[ 502.814057] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.auth_section = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.814238] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.auth_type = password {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.814404] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.814566] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.814734] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.814897] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.connect_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.815070] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.connect_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.815248] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.default_floating_pool = public {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.815410] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.endpoint_override = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.815574] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.extension_sync_interval = 600 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.815738] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.http_retries = 3 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.815901] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.816074] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.816238] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.max_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.816410] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.816572] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.min_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.816743] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.ovs_bridge = br-int {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.816914] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.physnets = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.817097] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.region_name = RegionOne {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.817264] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.retriable_status_codes = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.817437] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.service_metadata_proxy = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.817602] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.service_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.817774] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.service_type = network {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.817940] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.split_loggers = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.818113] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.status_code_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.818276] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.status_code_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.818434] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.818615] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.818778] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] neutron.version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.818952] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] notifications.bdms_in_notifications = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.819148] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] notifications.default_level = INFO {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.819329] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] notifications.notification_format = unversioned {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.819497] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] notifications.notify_on_state_change = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.819676] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.819858] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] pci.alias = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.820042] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] pci.device_spec = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.820281] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] pci.report_in_placement = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.820478] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.auth_section = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.820658] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.auth_type = password {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.820860] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.821044] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.821212] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.821379] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.821537] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.connect_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.821700] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.connect_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.821864] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.default_domain_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.822031] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.default_domain_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.822195] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.domain_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.822354] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.domain_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.822514] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.endpoint_override = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.822675] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.822857] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.823030] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.max_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.823194] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.min_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.823364] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.password = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.823523] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.project_domain_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.823689] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.project_domain_name = Default {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.823853] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.project_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.824036] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.project_name = service {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.824213] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.region_name = RegionOne {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.824375] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.retriable_status_codes = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.824536] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.service_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.824707] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.service_type = placement {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.824872] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.split_loggers = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.825046] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.status_code_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.825222] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.status_code_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.825387] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.system_scope = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.825547] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.825704] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.trust_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.825865] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.user_domain_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.826042] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.user_domain_name = Default {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.826205] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.user_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.826380] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.username = placement {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.826561] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.826721] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] placement.version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
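Two things are worth noting in the blocks above: options declared secret are masked before logging (placement.password and neutron.metadata_proxy_shared_secret both render as ****), and every effective value sits on a single line, which makes the dump easy to mine. A sketch of pulling one group's settings back out of a log in this format; the regex and the file name are assumptions based only on the layout shown here:

# Sketch: extract one group's "group.option = value" pairs from a
# dump in this format. The pattern and log file name are illustrative.
import re

PATTERN = re.compile(r'\]\s+(placement\.\S+)\s+=\s+(.*?)\s+\{\{')

def grep_group(path: str) -> dict[str, str]:
    values: dict[str, str] = {}
    with open(path) as fh:
        for line in fh:
            for key, value in PATTERN.findall(line):
                values[key] = value
    return values

# e.g. grep_group('n-cpu.log').get('placement.password') -> '****'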
[ 502.826912] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.cores = 20 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.827129] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.count_usage_from_placement = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.827311] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.827489] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.injected_file_content_bytes = 10240 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.827658] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.injected_file_path_length = 255 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.827827] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.injected_files = 5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.827996] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.instances = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.828181] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.key_pairs = 100 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.828348] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.metadata_items = 128 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.828514] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.ram = 51200 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.828680] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.recheck_quota = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.828847] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.server_group_members = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.829020] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] quota.server_groups = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.829199] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.829366] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.829533] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] scheduler.image_metadata_prefilter = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.829698] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.829867] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] scheduler.max_attempts = 3 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.830064] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] scheduler.max_placement_results = 1000 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.830246] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.830414] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] scheduler.query_placement_for_image_type_support = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.830579] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.830783] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] scheduler.workers = 2 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.830981] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.831171] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.831353] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.831526] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.831695] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.831859] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.832033] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.832232] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.832402] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.host_subset_size = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.832568] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.832757] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.832933] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.833142] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.isolated_hosts = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.833329] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.isolated_images = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.833489] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.833658] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.833830] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.833998] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.pci_in_placement = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.834179] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.834379] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.834509] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.834673] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.834839] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.835012] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.835184] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.track_instance_changes = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.835364] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
{{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.835866] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.836058] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] metrics.weight_setting = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.836391] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.836569] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] serial_console.enabled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.836750] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] serial_console.port_range = 10000:20000 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.836925] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.837110] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.837283] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] serial_console.serialproxy_port = 6083 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.837451] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] service_user.auth_section = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.837625] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] service_user.auth_type = password {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.837788] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] service_user.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.837948] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] service_user.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.838123] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] service_user.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.838285] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] service_user.insecure = False {{(pid=61962) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.838444] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] service_user.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.838616] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] service_user.send_service_user_token = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.838783] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] service_user.split_loggers = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.838946] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] service_user.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.839164] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.agent_enabled = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.839341] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.enabled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.839659] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.839856] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.840040] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.html5proxy_port = 6082 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.840211] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.image_compression = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.840374] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.jpeg_compression = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.840537] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.playback_compression = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.840722] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.server_listen = 127.0.0.1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.840916] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.841095] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.streaming_mode = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.841260] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] spice.zlib_compression = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.841429] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] upgrade_levels.baseapi = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.841602] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] upgrade_levels.compute = auto {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.841765] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] upgrade_levels.conductor = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.841927] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] upgrade_levels.scheduler = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.842129] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vendordata_dynamic_auth.auth_section = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.842312] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vendordata_dynamic_auth.auth_type = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.842477] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vendordata_dynamic_auth.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.842640] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vendordata_dynamic_auth.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.842838] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.843016] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vendordata_dynamic_auth.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.843187] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vendordata_dynamic_auth.keyfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.843354] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.843513] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vendordata_dynamic_auth.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.843735] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.api_retry_count = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.843853] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.ca_file = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.844036] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.cache_prefix = devstack-image-cache {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.844215] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.cluster_name = testcl1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.844383] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.connection_pool_size = 10 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.844545] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.console_delay_seconds = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.844714] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.datastore_regex = ^datastore.* {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.844927] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.845139] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.host_password = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.845324] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.host_port = 443 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.845502] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.host_username = administrator@vsphere.local {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.845673] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.insecure = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.845836] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.integration_bridge = None {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.846009] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.maximum_objects = 100 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.846180] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.pbm_default_policy = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.846346] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.pbm_enabled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.846506] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.pbm_wsdl_location = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.846677] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.846839] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.serial_port_proxy_uri = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.846999] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.serial_port_service_uri = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.847181] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.task_poll_interval = 0.5 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.847355] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.use_linked_clone = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.847527] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.vnc_keymap = en-us {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.847696] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.vnc_port = 5900 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.847863] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vmware.vnc_port_total = 10000 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.848060] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vnc.auth_schemes = ['none'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.848265] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vnc.enabled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.848565] env[61962]: 
DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.848756] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.848935] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vnc.novncproxy_port = 6080 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.849133] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vnc.server_listen = 127.0.0.1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.849313] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.849481] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vnc.vencrypt_ca_certs = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.849646] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vnc.vencrypt_client_cert = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.849808] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vnc.vencrypt_client_key = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.849994] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.850175] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.disable_deep_image_inspection = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.850340] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.850506] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.850669] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.850862] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.disable_rootwrap = False {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.851042] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.enable_numa_live_migration = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.851240] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.851423] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.851592] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.851759] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.libvirt_disable_apic = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.851924] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.852103] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.852272] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.852435] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.852600] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.852789] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.852965] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.853143] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
502.853310] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.853478] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.853666] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.853865] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] wsgi.client_socket_timeout = 900 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.854096] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] wsgi.default_pool_size = 1000 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.854279] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] wsgi.keep_alive = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.854451] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] wsgi.max_header_line = 16384 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.854618] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] wsgi.secure_proxy_ssl_header = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.854783] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] wsgi.ssl_ca_file = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.854946] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] wsgi.ssl_cert_file = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.855123] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] wsgi.ssl_key_file = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.855292] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] wsgi.tcp_keepidle = 600 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.855471] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.855640] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] zvm.ca_file = None {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.855805] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] zvm.cloud_connector_url = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.856116] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.856296] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] zvm.reachable_timeout = 300 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.856483] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_policy.enforce_new_defaults = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.856658] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_policy.enforce_scope = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.856844] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_policy.policy_default_rule = default {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.857060] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.857246] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_policy.policy_file = policy.yaml {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.857422] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.857588] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.857751] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.857913] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.858092] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.858268] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.858447] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.858625] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler.connection_string = messaging:// {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.858796] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler.enabled = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.858970] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler.es_doc_type = notification {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.859152] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler.es_scroll_size = 10000 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.859325] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler.es_scroll_time = 2m {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.859489] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler.filter_error_trace = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.859662] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler.hmac_keys = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.859835] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler.sentinel_service_name = mymaster {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.860039] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler.socket_timeout = 0.1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.860219] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler.trace_requests = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.860388] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler.trace_sqlalchemy = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.860573] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler_jaeger.process_tags = {} {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.860763] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] 
profiler_jaeger.service_name_prefix = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.860943] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] profiler_otlp.service_name_prefix = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.861127] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] remote_debug.host = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.861291] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] remote_debug.port = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.861473] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.861640] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.861807] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.861973] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.862154] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.862320] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.862486] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.862651] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.862845] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.863064] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.863239] env[61962]: 
DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.863414] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.863585] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.863758] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.863964] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.864116] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.864285] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.864463] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.864629] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.864795] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.864963] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.865165] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.865340] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.865512] env[61962]: DEBUG oslo_service.service [None 
req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.865678] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.865840] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.866037] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.866219] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.866395] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.866567] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.ssl = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.866748] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.866924] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.867103] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.867281] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.867455] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.ssl_version = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.867621] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.867816] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.867988] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_notifications.retry = -1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.868193] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.868373] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_messaging_notifications.transport_url = **** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.868553] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.auth_section = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.868722] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.auth_type = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.868887] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.cafile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.869083] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.certfile = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.869262] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.collect_timing = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.869425] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.connect_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.869588] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.connect_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.869751] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.endpoint_id = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.869910] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.endpoint_override = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.870087] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.insecure = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.870250] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.keyfile = None {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.870409] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.max_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.870574] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.min_version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.870741] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.region_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.870926] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.retriable_status_codes = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.871103] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.service_name = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.871268] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.service_type = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.871433] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.split_loggers = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.871595] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.status_code_retries = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.871757] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.status_code_retry_delay = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.871919] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.timeout = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.872120] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.valid_interfaces = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.872293] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_limit.version = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.872463] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_reports.file_event_handler = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 502.872630] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61962) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.872831] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] oslo_reports.log_dir = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.873015] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.873196] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.873369] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.873542] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.873711] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.873879] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.874074] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.874229] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_ovs_privileged.group = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.874391] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.874559] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.874726] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.874890] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] vif_plug_ovs_privileged.user = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.875095] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_linux_bridge.flat_interface = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.875299] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.875479] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.875653] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.875828] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.876019] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.876183] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.876350] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.876530] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.876705] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_ovs.isolate_vif = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.876885] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.877066] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.877241] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.877419] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_ovs.ovsdb_interface = native {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.877587] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_vif_ovs.per_port_bridge = False {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.877758] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_brick.lock_path = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.877927] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.878125] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.878313] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] privsep_osbrick.capabilities = [21] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.878477] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] privsep_osbrick.group = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.878639] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] privsep_osbrick.helper_command = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.878806] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.878974] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.879150] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] privsep_osbrick.user = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.879326] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.879487] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] nova_sys_admin.group = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.879648] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] nova_sys_admin.helper_command = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.879815] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.879982] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.880159] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] nova_sys_admin.user = None {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 502.880293] env[61962]: DEBUG oslo_service.service [None req-b30c84a2-b146-4d0b-8208-942de4bf74be None None] ******************************************************************************** {{(pid=61962) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}}
[ 502.881133] env[61962]: INFO nova.service [-] Starting compute node (version 0.0.1)
[ 502.892170] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Getting list of instances from cluster (obj){
[ 502.892170] env[61962]: value = "domain-c8"
[ 502.892170] env[61962]: _type = "ClusterComputeResource"
[ 502.892170] env[61962]: } {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 502.893558] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f93cd4-5ccf-467e-9a00-d93af4a511e0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 502.903340] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Got total of 0 instances {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 502.904037] env[61962]: WARNING nova.virt.vmwareapi.driver [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list.
[ 502.904516] env[61962]: INFO nova.virt.node [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Generated node identity 5a20dc57-fddd-49ec-bab5-953a03eebaa1
[ 502.904757] env[61962]: INFO nova.virt.node [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Wrote node identity 5a20dc57-fddd-49ec-bab5-953a03eebaa1 to /opt/stack/data/n-cpu-1/compute_id
[ 502.918917] env[61962]: WARNING nova.compute.manager [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Compute nodes ['5a20dc57-fddd-49ec-bab5-953a03eebaa1'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning.
[ 502.957855] env[61962]: INFO nova.compute.manager [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host
[ 502.987282] env[61962]: WARNING nova.compute.manager [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found.
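The wall of log_opt_values lines ending above is oslo.config's standard startup dump: once the service has parsed its configuration, every effective option is written out at DEBUG, group by group, and the dump is closed with a row of asterisks. A minimal sketch of that mechanism (the logger setup and the single registered option are illustrative, not taken from this deployment):

    import logging
    from oslo_config import cfg

    LOG = logging.getLogger(__name__)
    CONF = cfg.CONF
    # Illustrative option; nova registers hundreds of these across groups.
    CONF.register_opts([cfg.IntOpt("thread_pool_size", default=8)],
                       group="nova_sys_admin")

    logging.basicConfig(level=logging.DEBUG)
    CONF([])  # parse an empty command line
    # Writes one "group.option = value" DEBUG line per option, then the
    # closing row of asterisks -- the same shape as the dump above.
    CONF.log_opt_values(LOG, logging.DEBUG)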
[ 502.987527] env[61962]: DEBUG oslo_concurrency.lockutils [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 502.987745] env[61962]: DEBUG oslo_concurrency.lockutils [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 502.987892] env[61962]: DEBUG oslo_concurrency.lockutils [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 502.988056] env[61962]: DEBUG nova.compute.resource_tracker [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 502.989195] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b761f33-8944-4234-8f06-76907482ebde {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 502.998493] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c7f3ab-0ef0-4832-af8b-0129e77055c7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.014438] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12449e6e-5048-415d-81ae-d38131eaae8a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.021944] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e11ff24-6737-427a-bf59-2e6e7a9f75cc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.054434] env[61962]: DEBUG nova.compute.resource_tracker [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180583MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 503.054600] env[61962]: DEBUG oslo_concurrency.lockutils [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 503.054774] env[61962]: DEBUG oslo_concurrency.lockutils [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 503.068667] env[61962]: WARNING nova.compute.resource_tracker [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] No compute node record for cpu-1:5a20dc57-fddd-49ec-bab5-953a03eebaa1: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 5a20dc57-fddd-49ec-bab5-953a03eebaa1 could not be found.
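The "Acquiring lock / acquired / released" triples above, with their waited/held timings, come from oslo.concurrency's lockutils: the resource tracker wraps its critical sections in a named lock, and the inner wrapper logs each phase. A minimal sketch of the same pattern (the guarded function body is illustrative):

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def update_available_resource():
        # Only one thread may audit or claim this host's resources at a
        # time; entry and exit produce the acquire/release DEBUG lines.
        pass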
[ 503.084038] env[61962]: INFO nova.compute.resource_tracker [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 5a20dc57-fddd-49ec-bab5-953a03eebaa1
[ 503.143106] env[61962]: DEBUG nova.compute.resource_tracker [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 503.143106] env[61962]: DEBUG nova.compute.resource_tracker [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] stats={'failed_builds': '0'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 503.245205] env[61962]: INFO nova.scheduler.client.report [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] [req-69c5079d-7911-4045-a1fd-096d93ae67a7] Created resource provider record via placement API for resource provider with UUID 5a20dc57-fddd-49ec-bab5-953a03eebaa1 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.
[ 503.263475] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b152a60-a280-42ef-b73d-7141bf620ebe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.271999] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d1c396-8b39-4da7-8654-54d89d52b0e4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.305598] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ff6c81-0eba-43d2-a972-b73c257c04c1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.313789] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1635b1c4-3bbf-4099-b759-a6ffaec9052f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.328533] env[61962]: DEBUG nova.compute.provider_tree [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Updating inventory in ProviderTree for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 503.370830] env[61962]: DEBUG nova.scheduler.client.report [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Updated inventory for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}}
[ 503.371182] env[61962]: DEBUG nova.compute.provider_tree [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Updating resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 generation from 0 to 1 during operation: update_inventory {{(pid=61962) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 503.371328] env[61962]: DEBUG nova.compute.provider_tree [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Updating inventory in ProviderTree for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 503.418652] env[61962]: DEBUG nova.compute.provider_tree [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Updating resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 generation from 1 to 2 during operation: update_traits {{(pid=61962) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 503.438969] env[61962]: DEBUG nova.compute.resource_tracker [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 503.439226] env[61962]: DEBUG oslo_concurrency.lockutils [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.384s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 503.439403] env[61962]: DEBUG nova.service [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Creating RPC server for service compute {{(pid=61962) start /opt/stack/nova/nova/service.py:182}}
[ 503.452716] env[61962]: DEBUG nova.service [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] Join ServiceGroup membership for this service compute {{(pid=61962) start /opt/stack/nova/nova/service.py:199}}
[ 503.452921] env[61962]: DEBUG nova.servicegroup.drivers.db [None req-84155d27-a27a-417d-8737-9f1f802599e2 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61962) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}}
[ 512.715249] env[61962]: DEBUG dbcounter [-] [61962] Writing DB stats nova_cell1:SELECT=1 {{(pid=61962) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 512.716273] env[61962]: DEBUG dbcounter [-] [61962] Writing DB stats nova_cell0:SELECT=1 {{(pid=61962) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 530.455913] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
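The inventory pushed to placement above is worth a sanity check: placement treats (total - reserved) * allocation_ratio as the schedulable capacity of each resource class, so the 48 physical vCPUs with a 4.0 ratio advertise 192 schedulable VCPUs, while memory and disk are not overcommitted here. A back-of-envelope check with values from the dict in the log (the helper function is ours, not a placement API):

    def capacity(total, reserved, allocation_ratio):
        # Placement's usage-capacity rule for one resource class.
        return int((total - reserved) * allocation_ratio)

    print(capacity(48, 0, 4.0))        # VCPU      -> 192
    print(capacity(196590, 512, 1.0))  # MEMORY_MB -> 196078
    print(capacity(200, 0, 1.0))       # DISK_GB   -> 200; max_unit=96 still
                                       # caps any single allocation at 96GB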
[ 530.468757] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Getting list of instances from cluster (obj){
[ 530.468757] env[61962]: value = "domain-c8"
[ 530.468757] env[61962]: _type = "ClusterComputeResource"
[ 530.468757] env[61962]: } {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 530.471030] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc24c36-f0a5-44d1-8de7-6b30ac083caf {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 530.479336] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Got total of 0 instances {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 530.479724] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 530.480178] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Getting list of instances from cluster (obj){
[ 530.480178] env[61962]: value = "domain-c8"
[ 530.480178] env[61962]: _type = "ClusterComputeResource"
[ 530.480178] env[61962]: } {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 530.481306] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a22d72-ceac-421a-a6ab-d8d6a66ac81c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 530.489840] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Got total of 0 instances {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 546.419905] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Acquiring lock "495acb0f-5b45-457e-9218-db57a3612289" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 546.420374] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Lock "495acb0f-5b45-457e-9218-db57a3612289" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 546.444649] env[61962]: DEBUG nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
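The _sync_power_states and _cleanup_running_deleted_instances runs above are oslo.service periodic tasks: methods on the compute manager decorated so the service's task loop invokes them on an interval. A minimal sketch of the decorator pattern (class name, interval, and body are illustrative):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=600)
        def _sync_power_states(self, context):
            # Reconcile the DB's power state with the hypervisor's view,
            # as the task logged above does against the vCenter cluster.
            pass

    manager = Manager(cfg.CONF)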
[ 546.570960] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 546.571262] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 546.575018] env[61962]: INFO nova.compute.claims [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 546.754275] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749b4d4c-b1ad-435c-979b-74e86de9f4cd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 546.762408] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc589c9-984a-44b6-91db-a2a2fa49454d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 546.794474] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82e6e1f-9a01-44d7-aaad-d41958603645 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 546.804164] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b25d98b-9f5e-4e74-8a4c-887fe2fd9b73 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 546.818807] env[61962]: DEBUG nova.compute.provider_tree [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 546.839352] env[61962]: DEBUG nova.scheduler.client.report [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 546.860476] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.289s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 546.862248] env[61962]: DEBUG nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 546.917474] env[61962]: DEBUG nova.compute.utils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 546.919140] env[61962]: DEBUG nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 546.919396] env[61962]: DEBUG nova.network.neutron [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 546.945966] env[61962]: DEBUG nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 547.052425] env[61962]: DEBUG nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
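The entries above trace one pass of the build sequence for instance 495acb0f: claim resources, kick off network allocation in the background, build block-device mappings in the foreground, then start the hypervisor spawn. A simplified, runnable paraphrase of that ordering (stub bodies, not Nova's actual code):

    import threading

    def allocate_networks(instance, result):
        result["network_info"] = []  # stands in for allocate_for_instance()

    def build_and_run_instance(instance):
        result = {}
        # 1. the resource claim happens under the "compute_resources" lock
        # 2. networking is allocated asynchronously...
        t = threading.Thread(target=allocate_networks, args=(instance, result))
        t.start()
        # 3. ...while block device mappings are built in the foreground
        block_device_info = []
        t.join()
        # 4. finally the driver spawn gets both results
        print("spawning", instance, result["network_info"], block_device_info)

    build_and_run_instance("495acb0f-5b45-457e-9218-db57a3612289")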
[ 547.322389] env[61962]: DEBUG nova.virt.hardware [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 547.322639] env[61962]: DEBUG nova.virt.hardware [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 547.322803] env[61962]: DEBUG nova.virt.hardware [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 547.323019] env[61962]: DEBUG nova.virt.hardware [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 547.323230] env[61962]: DEBUG nova.virt.hardware [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 547.323376] env[61962]: DEBUG nova.virt.hardware [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 547.323652] env[61962]: DEBUG nova.virt.hardware [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 547.323791] env[61962]: DEBUG nova.virt.hardware [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 547.326089] env[61962]: DEBUG nova.virt.hardware [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
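With a 1-vCPU flavor and no flavor or image topology constraints, the limits fall back to 65536 for sockets, cores and threads, and the only factorization of one vCPU is 1x1x1, which is why a single topology is found. A toy enumeration in the same spirit (simplified from what nova.virt.hardware does; not Nova's actual code):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield every (sockets, cores, threads) whose product is vcpus.
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matching the log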
[ 547.326089] env[61962]: DEBUG nova.virt.hardware [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 547.326089] env[61962]: DEBUG nova.virt.hardware [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 547.326089] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7056f6-9eff-4c51-a18e-d3e9b09568df {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.335890] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32b4a0b-b6ab-4716-8e4e-f13e103a3615 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.354600] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d043896b-519b-4846-950a-f9f7adc330a0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.539223] env[61962]: DEBUG nova.policy [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c39ed781d86d430f96f9c4211aa7925c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5387c68fd60c4139a463bca1d3255da0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}}
[ 548.163475] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Acquiring lock "53921884-f617-4020-b779-650becf89a58" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 548.163600] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Lock "53921884-f617-4020-b779-650becf89a58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 548.195053] env[61962]: DEBUG nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
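The nova.policy line above is a routine oslo.policy check: a tempest project member without the admin role asks for network:attach_external_network and the rule evaluates to False (the DEBUG line records the failed check; it is not an error). A minimal sketch of such a check (the default rule string here is illustrative; Nova defines its own policy defaults):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        "network:attach_external_network", "role:admin"))

    creds = {"roles": ["member", "reader"], "project_id": "demo"}
    print(enforcer.authorize("network:attach_external_network",
                             {}, creds, do_raise=False))  # False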
[ 548.308903] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 548.309409] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 548.311034] env[61962]: INFO nova.compute.claims [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 548.416731] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquiring lock "06028841-b941-4352-b5c1-fa71cc87a4a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 548.416951] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Lock "06028841-b941-4352-b5c1-fa71cc87a4a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 548.441063] env[61962]: DEBUG nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 548.492413] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0330f257-bcf8-4d1e-b4da-ad6106731923 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 548.508391] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffe0427-f77c-48a8-bbe6-a9ae9c8bc6df {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 548.547683] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 548.548647] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e121484b-4fdc-4105-9b23-9ba851006395 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 548.561168] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea73127-92d4-4d66-9d71-fba309ae5f23 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 548.587707] env[61962]: DEBUG nova.compute.provider_tree [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 548.607883] env[61962]: DEBUG nova.scheduler.client.report [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 548.649887] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.340s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 548.650455] env[61962]: DEBUG nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 548.653277] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.105s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 548.654840] env[61962]: INFO nova.compute.claims [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 548.676553] env[61962]: DEBUG nova.network.neutron [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Successfully created port: f8f22fe7-d92c-4cfa-b645-fc641223c320 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 548.725045] env[61962]: DEBUG nova.compute.utils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 548.727378] env[61962]: DEBUG nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 548.728093] env[61962]: DEBUG nova.network.neutron [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 548.741876] env[61962]: DEBUG nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 548.828166] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ed005c-3521-44f7-ba16-8ab825ab610a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 548.836507] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856e021f-09bf-4c80-b7d6-255c77bc26ab {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 548.881384] env[61962]: DEBUG nova.policy [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a95f97926f049339b8a29b8f122d90b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7a5eef5ba8d4ba88356ffba87aec75c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}}
[ 548.885233] env[61962]: DEBUG nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 548.885446] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a935f7-f107-42ec-9883-cdc29b5ec7b5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 548.894963] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b434c7d-530c-47ec-9a9b-011bbca235d3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 548.915975] env[61962]: DEBUG nova.compute.provider_tree [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 548.931823] env[61962]: DEBUG nova.virt.hardware [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 548.931978] env[61962]: DEBUG nova.virt.hardware [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 548.933096] env[61962]: DEBUG nova.virt.hardware [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 548.933096] env[61962]: DEBUG nova.virt.hardware [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 548.933096] env[61962]: DEBUG nova.virt.hardware [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 548.933096] env[61962]: DEBUG nova.virt.hardware [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 548.933096] env[61962]: DEBUG nova.virt.hardware [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 548.933419] env[61962]: DEBUG nova.virt.hardware [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 548.939158] env[61962]: DEBUG nova.virt.hardware [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 548.939158] env[61962]: DEBUG nova.virt.hardware [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 548.939158] env[61962]: DEBUG nova.virt.hardware [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 548.939158] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb63cc01-bb8f-481b-849d-63a984715ac2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 548.942473] env[61962]: DEBUG nova.scheduler.client.report [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 548.955592] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f794dfe7-fac9-4a4b-b6ab-3f086c2e4ff1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 548.963955] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.309s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 548.963955] env[61962]: DEBUG nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 549.006839] env[61962]: DEBUG nova.compute.utils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 549.007811] env[61962]: DEBUG nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Not allocating networking since 'none' was specified. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}}
[ 549.019639] env[61962]: DEBUG nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 549.105456] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Acquiring lock "dc6c9cb6-419f-4122-8351-0c517fb8616d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 549.106037] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Lock "dc6c9cb6-419f-4122-8351-0c517fb8616d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 549.109024] env[61962]: DEBUG nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 549.120412] env[61962]: DEBUG nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 549.171181] env[61962]: DEBUG nova.virt.hardware [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 549.171429] env[61962]: DEBUG nova.virt.hardware [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 549.171591] env[61962]: DEBUG nova.virt.hardware [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 549.173151] env[61962]: DEBUG nova.virt.hardware [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 549.173151] env[61962]: DEBUG nova.virt.hardware [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 549.173151] env[61962]: DEBUG nova.virt.hardware [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 549.173151] env[61962]: DEBUG nova.virt.hardware [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 549.173151] env[61962]: DEBUG nova.virt.hardware [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 549.173401] env[61962]: DEBUG 
nova.virt.hardware [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 549.173401] env[61962]: DEBUG nova.virt.hardware [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 549.173401] env[61962]: DEBUG nova.virt.hardware [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 549.174630] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d362a537-f7bf-409e-8e35-5a0de9aba8fa {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.184981] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef92ab66-3614-48a2-9da6-ae924808c408 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.216600] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Instance VIF info [] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 549.224521] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 549.228048] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e282c670-063d-49ef-8041-0ebe8d517298 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.241459] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Created folder: OpenStack in parent group-v4. [ 549.241518] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Creating folder: Project (30841c5d09234932ad2f2ee679f7ee5d). Parent ref: group-v953327. 
{{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 549.241871] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e56fa8d-2bac-4fd8-9be4-65bc00959d6e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.253679] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Created folder: Project (30841c5d09234932ad2f2ee679f7ee5d) in parent group-v953327. [ 549.253841] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Creating folder: Instances. Parent ref: group-v953328. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 549.254107] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02bafdc3-a76d-4e6e-9c6a-54d110764753 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.266205] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Created folder: Instances in parent group-v953328. [ 549.266574] env[61962]: DEBUG oslo.service.loopingcall [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.266865] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 549.267442] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d69e2ab0-68db-4d8c-861e-d6d99420305c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.280991] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.281861] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.283267] env[61962]: INFO nova.compute.claims [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 549.295500] env[61962]: DEBUG 
oslo_vmware.api [-] Waiting for the task: (returnval){ [ 549.295500] env[61962]: value = "task-4891901" [ 549.295500] env[61962]: _type = "Task" [ 549.295500] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.305016] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891901, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.525048] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0a8941-4f01-4dac-a3fd-0a69af686221 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.535218] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5bc431-16c4-43a0-905d-c6c66bb0d1fb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.573509] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e4ee99-90af-4bae-90e3-d7baeae662f4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.583204] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0178dd-6c7c-4887-9b1d-03e9b77edbc4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.599914] env[61962]: DEBUG nova.compute.provider_tree [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.613058] env[61962]: DEBUG nova.scheduler.client.report [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.635952] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.352s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.635952] env[61962]: DEBUG nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Start building networks asynchronously for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 549.684054] env[61962]: DEBUG nova.compute.utils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 549.685204] env[61962]: DEBUG nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 549.685582] env[61962]: DEBUG nova.network.neutron [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 549.697048] env[61962]: DEBUG nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 549.779314] env[61962]: DEBUG nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 549.807444] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891901, 'name': CreateVM_Task, 'duration_secs': 0.33226} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.810316] env[61962]: DEBUG nova.virt.hardware [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 549.810564] env[61962]: DEBUG nova.virt.hardware [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 549.810718] env[61962]: DEBUG nova.virt.hardware [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 549.810896] env[61962]: DEBUG nova.virt.hardware [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 549.811047] env[61962]: DEBUG nova.virt.hardware [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 549.811399] env[61962]: DEBUG nova.virt.hardware [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 549.811399] env[61962]: DEBUG nova.virt.hardware [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 549.811558] env[61962]: DEBUG nova.virt.hardware [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
549.811717] env[61962]: DEBUG nova.virt.hardware [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 549.812088] env[61962]: DEBUG nova.virt.hardware [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 549.812088] env[61962]: DEBUG nova.virt.hardware [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 549.812285] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 549.813761] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf77793-76d3-4f11-af42-b17fc488d8ef {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.816969] env[61962]: DEBUG oslo_vmware.service [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f7d3b1-1adf-4b79-a24b-a8dbaff69f4f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.828277] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ae1c12-3913-4a16-a293-4412c66235c2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.837173] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.837403] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.838260] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 549.838671] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-c447deed-333e-4e5c-855f-a5b15ac8a2d0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.854364] env[61962]: DEBUG oslo_vmware.api [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Waiting for the task: (returnval){ [ 549.854364] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]529b095c-441c-5074-3ab7-abea63dc8cc9" [ 549.854364] env[61962]: _type = "Task" [ 549.854364] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.862356] env[61962]: DEBUG oslo_vmware.api [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]529b095c-441c-5074-3ab7-abea63dc8cc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.044587] env[61962]: DEBUG nova.policy [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9c3daa6abae468f95849f5fca96f99b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1aff07cb3e74298b4e347bd4176fbf8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 550.371466] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.371809] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 550.372145] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.373180] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.373731] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 550.374938] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b5ab9a2-3b0b-4fb4-9bfd-679a949d2772 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.387249] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 550.387450] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 550.388402] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc1c4ac-9bac-4597-9e48-88b50fde14ca {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.397313] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12356312-4bd8-4d18-8ade-3d014a8257bd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.406633] env[61962]: DEBUG oslo_vmware.api [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Waiting for the task: (returnval){ [ 550.406633] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]529a4d48-d7c0-38a3-03f0-0e0a344417bf" [ 550.406633] env[61962]: _type = "Task" [ 550.406633] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.420530] env[61962]: DEBUG oslo_vmware.api [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]529a4d48-d7c0-38a3-03f0-0e0a344417bf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.493701] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquiring lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.493701] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.513233] env[61962]: DEBUG nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 550.596366] env[61962]: DEBUG nova.network.neutron [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Successfully created port: e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 550.600432] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.600696] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.602200] env[61962]: INFO nova.compute.claims [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.772243] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94e52b2-adb1-4b08-90dc-d90524cad283 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.782789] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2480730-0266-476d-acc9-07876b5b6f5a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.817708] env[61962]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7d47b4-a6c5-4d48-bb90-f625f92b3a88 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.826294] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b16624-d1d1-43a9-a7b5-e897870e3763 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.842931] env[61962]: DEBUG nova.compute.provider_tree [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.856230] env[61962]: DEBUG nova.scheduler.client.report [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 550.882469] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.279s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.882469] env[61962]: DEBUG nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Start building networks asynchronously for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 550.926787] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 550.927115] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Creating directory with path [datastore2] vmware_temp/fd0d674d-87ea-46eb-bfc8-13a952dc5cdc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 550.927318] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e82bca8-096a-41e4-a33d-f2ee9b7e6c8c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.934359] env[61962]: DEBUG nova.compute.utils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 550.935191] env[61962]: DEBUG nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 550.935374] env[61962]: DEBUG nova.network.neutron [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 550.948875] env[61962]: DEBUG nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Start building block device mappings for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 550.955604] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Created directory with path [datastore2] vmware_temp/fd0d674d-87ea-46eb-bfc8-13a952dc5cdc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 550.955821] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Fetch image to [datastore2] vmware_temp/fd0d674d-87ea-46eb-bfc8-13a952dc5cdc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 550.956957] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/fd0d674d-87ea-46eb-bfc8-13a952dc5cdc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 550.956957] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772027a0-d5ab-45ce-9e24-e93467678d06 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.967147] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8275000-ab79-48f9-bff4-51cd360244cc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.980279] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087e4404-30dd-43d0-90cb-7da8cb01f3f6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.016349] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27ff51d-1b7d-4a46-98a6-cf0ea80f9977 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.030767] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7f7bcb55-41b1-4a6c-a502-c7a3b186f0c1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.068801] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 551.079704] env[61962]: DEBUG nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 
6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 551.124615] env[61962]: DEBUG nova.virt.hardware [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 551.125379] env[61962]: DEBUG nova.virt.hardware [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 551.125379] env[61962]: DEBUG nova.virt.hardware [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 551.125379] env[61962]: DEBUG nova.virt.hardware [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 551.125556] env[61962]: DEBUG nova.virt.hardware [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 551.125597] env[61962]: DEBUG nova.virt.hardware [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 551.125843] env[61962]: DEBUG nova.virt.hardware [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 551.126139] env[61962]: DEBUG nova.virt.hardware [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 551.126267] env[61962]: DEBUG nova.virt.hardware [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 551.126464] env[61962]: DEBUG nova.virt.hardware [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 551.126651] env[61962]: DEBUG nova.virt.hardware [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 551.127534] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff2ac79-730d-4386-a134-73328ad6cd89 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.139054] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250b3d50-0f06-413e-9680-3f0a4dd2cb2e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.167906] env[61962]: DEBUG oslo_vmware.rw_handles [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fd0d674d-87ea-46eb-bfc8-13a952dc5cdc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 551.239065] env[61962]: DEBUG oslo_vmware.rw_handles [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 551.239065] env[61962]: DEBUG oslo_vmware.rw_handles [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fd0d674d-87ea-46eb-bfc8-13a952dc5cdc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 551.243935] env[61962]: DEBUG nova.policy [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c90c011f1154b59bf40c183dbd37ec8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1dd984fe1bf84471ae103d28565d160b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 551.310636] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquiring lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.310775] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.331215] env[61962]: DEBUG nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 551.444653] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.445495] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.447045] env[61962]: INFO nova.compute.claims [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 551.684131] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ce18aa-7296-4f73-b71d-cad5115ad6cb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.694581] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c080d5b-a6a3-4a46-966f-40a3a8c81bae {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.731250] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94bc722-e928-48d0-8786-1c21b90123f3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.740067] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43fecda-3400-474a-beda-761be9b6ef8d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.757707] env[61962]: DEBUG nova.compute.provider_tree [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.771987] env[61962]: DEBUG nova.scheduler.client.report [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 551.813064] env[61962]: DEBUG oslo_concurrency.lockutils [None 
req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.368s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.813702] env[61962]: DEBUG nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 551.899685] env[61962]: DEBUG nova.compute.utils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 551.900611] env[61962]: DEBUG nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 551.900611] env[61962]: DEBUG nova.network.neutron [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 551.920200] env[61962]: DEBUG nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 552.070418] env[61962]: DEBUG nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 552.125064] env[61962]: DEBUG nova.virt.hardware [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 552.125305] env[61962]: DEBUG nova.virt.hardware [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 552.125461] env[61962]: DEBUG nova.virt.hardware [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 552.125669] env[61962]: DEBUG nova.virt.hardware [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 552.125844] env[61962]: DEBUG nova.virt.hardware [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 552.125995] env[61962]: DEBUG nova.virt.hardware [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 552.127324] env[61962]: DEBUG nova.virt.hardware [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 552.128401] env[61962]: DEBUG nova.virt.hardware [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 552.128640] env[61962]: DEBUG nova.virt.hardware [None 
req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 552.128826] env[61962]: DEBUG nova.virt.hardware [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 552.129127] env[61962]: DEBUG nova.virt.hardware [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 552.130268] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d7f267-501f-43f5-aa37-5d2da82f3ae4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.141518] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46dfeaab-52b2-46fe-988c-f20e8f79d41d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.289766] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "d0df932e-822b-4c60-a588-445aa2f14cdb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.289766] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d0df932e-822b-4c60-a588-445aa2f14cdb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.315417] env[61962]: DEBUG nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Starting instance... 
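
The hardware lines just above show the whole CPU-topology selection funnel for this flavor/image pair: no preferred topology from flavor or image (0:0:0 everywhere), effectively unbounded maximums (65536 per dimension), and a single surviving candidate for one vCPU. A sketch of the enumeration step, assuming a simple divisor walk rather than the exact nova.virt.hardware code:

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, maximum):
    # Walk the divisors of the vCPU count: every (sockets, cores, threads)
    # whose product equals vcpus and fits the per-dimension maximums is a
    # candidate topology.
    for sockets in range(1, min(vcpus, maximum.sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, maximum.cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= maximum.threads:
                yield VirtCPUTopology(sockets, cores, threads)

# For the m1.nano flavor's single vCPU under the 65536 limits from the log,
# the only candidate is 1:1:1 -- "Got 1 possible topologies".
print(list(possible_topologies(1, VirtCPUTopology(65536, 65536, 65536))))
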
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 552.401952] env[61962]: DEBUG nova.network.neutron [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Successfully created port: d0601b5c-744b-4423-8d06-01097a3c2e82 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 552.420497] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.421482] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.423256] env[61962]: INFO nova.compute.claims [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 552.466912] env[61962]: DEBUG nova.policy [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb0feef857e5435093becee3adb57d02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13cf9ead87c749fc963fbe35b22b4fd6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 552.571029] env[61962]: DEBUG nova.network.neutron [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Successfully updated port: f8f22fe7-d92c-4cfa-b645-fc641223c320 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 552.590729] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Acquiring lock "refresh_cache-495acb0f-5b45-457e-9218-db57a3612289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.590729] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Acquired lock "refresh_cache-495acb0f-5b45-457e-9218-db57a3612289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.590729] env[61962]: DEBUG nova.network.neutron [None 
req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 552.649425] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bd7da2-5ab7-4a1c-be8e-aa7028d374d7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.656557] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54215fee-21e7-4a76-b376-d0cb73aee4e9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.688933] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d545badd-8708-4a18-ae3d-d39ebd487a5d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.699201] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8b62e3-d2d6-4601-a5c9-1451850e8422 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.717074] env[61962]: DEBUG nova.compute.provider_tree [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.726876] env[61962]: DEBUG nova.network.neutron [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Instance cache missing network info. 
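
The "Policy check ... failed" entry above is expected rather than an error: attaching an external network is an admin-only API by default, and this tempest user's context carries only the member and reader roles, so the DEBUG-level failure just means external networks are excluded when choosing a network for the port. A toy role check in that spirit; the rule string and table are assumptions for the sketch (the real defaults live in nova's policy registry and are evaluated by oslo.policy):

# Assumed admin-only default for the sketch.
POLICY = {"network:attach_external_network": "role:admin"}

def authorize(rule, creds):
    # Minimal "role:<name>" evaluator: the check passes only when the
    # named role appears in the request context's roles.
    kind, _, value = POLICY[rule].partition(":")
    if kind != "role":
        raise ValueError("sketch only handles role checks")
    return value in creds["roles"]

creds = {"roles": ["member", "reader"], "is_admin": False}
print(authorize("network:attach_external_network", creds))  # False, as logged
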
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 552.734129] env[61962]: DEBUG nova.scheduler.client.report [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 552.755527] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.335s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.756352] env[61962]: DEBUG nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 552.835259] env[61962]: DEBUG nova.compute.utils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 552.836650] env[61962]: DEBUG nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 552.836824] env[61962]: DEBUG nova.network.neutron [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 552.857960] env[61962]: DEBUG nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 552.973387] env[61962]: DEBUG nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Start spawning the instance on the hypervisor. 
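
The inventory comparison above is easier to read with placement's capacity arithmetic applied: usable capacity per resource class is (total - reserved) * allocation_ratio, and max_unit caps what a single allocation may take. Using the exact numbers from the log:

# Inventory records copied from the report above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 200, "reserved": 0, "min_unit": 1, "max_unit": 96,
                "step_size": 1, "allocation_ratio": 1.0},
}

def capacity(inv):
    # Overcommit applies to whatever remains after the reservation.
    return int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])

for rc, inv in inventory.items():
    print(rc, capacity(inv), "max per allocation:", inv["max_unit"])
# VCPU 192, MEMORY_MB 196078, DISK_GB 200

So 48 physical cores advertise 192 schedulable VCPUs under the 4.0 overcommit, while a single instance can still claim at most 16.
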
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 553.005810] env[61962]: DEBUG nova.virt.hardware [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 553.007339] env[61962]: DEBUG nova.virt.hardware [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 553.007586] env[61962]: DEBUG nova.virt.hardware [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 553.007839] env[61962]: DEBUG nova.virt.hardware [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 553.009520] env[61962]: DEBUG nova.virt.hardware [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 553.009520] env[61962]: DEBUG nova.virt.hardware [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 553.009520] env[61962]: DEBUG nova.virt.hardware [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 553.009520] env[61962]: DEBUG nova.virt.hardware [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 553.009520] env[61962]: DEBUG nova.virt.hardware [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Got 1 possible 
topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 553.009778] env[61962]: DEBUG nova.virt.hardware [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 553.009778] env[61962]: DEBUG nova.virt.hardware [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 553.010752] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062c1a42-a3b1-46df-ab86-43de3620465e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.021881] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57502cd-be21-4591-831c-1180506f314a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.190925] env[61962]: DEBUG nova.network.neutron [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Successfully created port: e176502f-775b-4502-891f-1388d15e7655 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 553.227972] env[61962]: DEBUG nova.policy [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84e0d67cbeef490f920225baaa7ef0bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e58b68272c404983993a09ad12e8c03b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 553.637959] env[61962]: DEBUG nova.network.neutron [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Updating instance_info_cache with network_info: [{"id": "f8f22fe7-d92c-4cfa-b645-fc641223c320", "address": "fa:16:3e:09:0b:4a", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.103", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": 
"nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8f22fe7-d9", "ovs_interfaceid": "f8f22fe7-d92c-4cfa-b645-fc641223c320", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.666344] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Releasing lock "refresh_cache-495acb0f-5b45-457e-9218-db57a3612289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.666344] env[61962]: DEBUG nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Instance network_info: |[{"id": "f8f22fe7-d92c-4cfa-b645-fc641223c320", "address": "fa:16:3e:09:0b:4a", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.103", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8f22fe7-d9", "ovs_interfaceid": "f8f22fe7-d92c-4cfa-b645-fc641223c320", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 553.666663] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:0b:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a69ed1dd-213a-4e30-992a-466735188bf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8f22fe7-d92c-4cfa-b645-fc641223c320', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 553.681062] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Creating folder: Project (5387c68fd60c4139a463bca1d3255da0). Parent ref: group-v953327. 
{{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 553.683329] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e999d82-e875-4598-9444-547a19bd4d6f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.695307] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Created folder: Project (5387c68fd60c4139a463bca1d3255da0) in parent group-v953327. [ 553.696769] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Creating folder: Instances. Parent ref: group-v953331. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 553.697410] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a918d9d-9730-4659-bdb4-d5415a80eb79 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.711084] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Created folder: Instances in parent group-v953331. [ 553.711467] env[61962]: DEBUG oslo.service.loopingcall [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.711735] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 553.712677] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5dcedfd3-d4c7-4c0f-adbc-8608ac267923 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.742023] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 553.742023] env[61962]: value = "task-4891904" [ 553.742023] env[61962]: _type = "Task" [ 553.742023] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.753898] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891904, 'name': CreateVM_Task} progress is 0%. 
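
The '(returnval){ value = "task-4891904" ... }' block and the "progress is 0%" line are one poll cycle: CreateVM_Task returns a task handle immediately and the client re-reads its state until vCenter reports success or error. A toy poller with an assumed info-dict shape (oslo.vmware's real wait_for_task drives this through a looping call, per the file paths in the log):

import time

def wait_for_task(poll, interval=0.5, timeout=300.0):
    # poll() is assumed to return e.g. {"state": "running", "progress": 0}
    # or {"state": "success", ...} for the vCenter task being watched.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        time.sleep(interval)  # the gap between "progress is ...%" lines
    raise TimeoutError("task did not complete before the deadline")

# Two canned polls: one in-flight reading, then the completed task, with
# the duration the log reports for task-4891904 below.
states = iter([{"state": "running", "progress": 0},
               {"state": "success", "duration_secs": 0.365841}])
print(wait_for_task(lambda: next(states), interval=0.01))
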
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.186216] env[61962]: DEBUG nova.network.neutron [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Successfully updated port: e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 554.238867] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Acquiring lock "refresh_cache-53921884-f617-4020-b779-650becf89a58" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.238867] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Acquired lock "refresh_cache-53921884-f617-4020-b779-650becf89a58" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.238867] env[61962]: DEBUG nova.network.neutron [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 554.270890] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891904, 'name': CreateVM_Task, 'duration_secs': 0.365841} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.274034] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 554.306837] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.307028] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.307666] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 554.307944] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a78b2c5-c0b4-4bb4-8c4b-908825b04f4e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.314891] env[61962]: DEBUG oslo_vmware.api [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Waiting for the task: (returnval){ [ 554.314891] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52b6403e-3672-0153-2268-90ac7cb69cd6" [ 554.314891] env[61962]: _type = "Task" [ 554.314891] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.329945] env[61962]: DEBUG oslo_vmware.api [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52b6403e-3672-0153-2268-90ac7cb69cd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.385854] env[61962]: DEBUG nova.network.neutron [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Instance cache missing network info. 
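
The datastore lock and SearchDatastore_Task just above are an image-cache probe: spawn only streams the image from Glance when devstack-image-cache_base/<image id>/<image id>.vmdk is not already on the datastore, which is why every build in this run converges on the same f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe path. A local-filesystem analogue of that check, with fetch standing in for the actual download:

import os
import shutil

def ensure_cached_image(cache_dir, image_id, fetch):
    # Reuse <cache>/<image id>/<image id>.vmdk when present; otherwise
    # download to a temporary name and publish it into the cache.
    cached = os.path.join(cache_dir, image_id, image_id + ".vmdk")
    if not os.path.exists(cached):
        os.makedirs(os.path.dirname(cached), exist_ok=True)
        partial = cached + ".part"
        fetch(partial)              # caller-supplied download callable
        shutil.move(partial, cached)
    return cached
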
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.830384] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.831318] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 554.831318] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.086833] env[61962]: DEBUG nova.network.neutron [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Updating instance_info_cache with network_info: [{"id": "e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc", "address": "fa:16:3e:d3:ed:c6", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8b63a66-f8", "ovs_interfaceid": "e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.107691] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Releasing lock "refresh_cache-53921884-f617-4020-b779-650becf89a58" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.107887] env[61962]: DEBUG nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Instance network_info: 
|[{"id": "e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc", "address": "fa:16:3e:d3:ed:c6", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8b63a66-f8", "ovs_interfaceid": "e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 555.108640] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:ed:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a69ed1dd-213a-4e30-992a-466735188bf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 555.118135] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Creating folder: Project (c7a5eef5ba8d4ba88356ffba87aec75c). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.119309] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c082217-2bc0-4acb-8877-1bffdfd4f3a5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.132817] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Created folder: Project (c7a5eef5ba8d4ba88356ffba87aec75c) in parent group-v953327. [ 555.132817] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Creating folder: Instances. Parent ref: group-v953334. 
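
The "Instance VIF info" entry above is a direct reshaping of the Neutron port held in the instance_info_cache. A sketch of that mapping, reconstructed from the data visible in the log rather than from the vmwareapi driver source:

def vif_info_from_network_info(vif):
    # The port's nsx-logical-switch-id becomes an OpaqueNetwork reference,
    # the port id is reused as iface_id, and the bridge name is kept as
    # the network name.
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",  # per the image's vmxnet3 hardware hint
    }

# Trimmed-down port entry from the network_info above.
vif = {
    "id": "e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc",
    "address": "fa:16:3e:d3:ed:c6",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6"},
}
print(vif_info_from_network_info(vif))  # matches the VIF info logged above
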
{{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.133264] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e793c774-496a-48f2-9eb8-6ed98bfa9772 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.149133] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Created folder: Instances in parent group-v953334. [ 555.149520] env[61962]: DEBUG oslo.service.loopingcall [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.150037] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53921884-f617-4020-b779-650becf89a58] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 555.150090] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52fe4b51-7be3-4579-a94a-44ad0d6669d8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.189645] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 555.189645] env[61962]: value = "task-4891907" [ 555.189645] env[61962]: _type = "Task" [ 555.189645] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.204516] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891907, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.236448] env[61962]: DEBUG nova.network.neutron [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Successfully created port: 0816a8d0-7081-4e73-b7dd-b432b1dd003f {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 555.295834] env[61962]: DEBUG nova.network.neutron [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Successfully updated port: d0601b5c-744b-4423-8d06-01097a3c2e82 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 555.339122] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Acquiring lock "refresh_cache-dc6c9cb6-419f-4122-8351-0c517fb8616d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.339122] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Acquired lock "refresh_cache-dc6c9cb6-419f-4122-8351-0c517fb8616d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.339228] env[61962]: DEBUG nova.network.neutron [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 555.488303] env[61962]: DEBUG nova.network.neutron [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.512665] env[61962]: DEBUG nova.network.neutron [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Successfully created port: a3da001e-9ef5-4b16-ac0f-1196d6253f2a {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 555.652927] env[61962]: DEBUG nova.compute.manager [req-745eb134-1cf9-4782-840a-0eff489afa55 req-62a73bc5-f7cd-4187-9140-4caaeee28b7e service nova] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Received event network-vif-plugged-f8f22fe7-d92c-4cfa-b645-fc641223c320 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 555.653626] env[61962]: DEBUG oslo_concurrency.lockutils [req-745eb134-1cf9-4782-840a-0eff489afa55 req-62a73bc5-f7cd-4187-9140-4caaeee28b7e service nova] Acquiring lock "495acb0f-5b45-457e-9218-db57a3612289-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.653626] env[61962]: DEBUG oslo_concurrency.lockutils [req-745eb134-1cf9-4782-840a-0eff489afa55 req-62a73bc5-f7cd-4187-9140-4caaeee28b7e service nova] Lock "495acb0f-5b45-457e-9218-db57a3612289-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.653847] env[61962]: DEBUG oslo_concurrency.lockutils [req-745eb134-1cf9-4782-840a-0eff489afa55 req-62a73bc5-f7cd-4187-9140-4caaeee28b7e service nova] Lock "495acb0f-5b45-457e-9218-db57a3612289-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.657167] env[61962]: DEBUG nova.compute.manager [req-745eb134-1cf9-4782-840a-0eff489afa55 req-62a73bc5-f7cd-4187-9140-4caaeee28b7e service nova] [instance: 495acb0f-5b45-457e-9218-db57a3612289] No waiting events found dispatching network-vif-plugged-f8f22fe7-d92c-4cfa-b645-fc641223c320 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 555.657489] env[61962]: WARNING nova.compute.manager [req-745eb134-1cf9-4782-840a-0eff489afa55 req-62a73bc5-f7cd-4187-9140-4caaeee28b7e service nova] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Received unexpected event network-vif-plugged-f8f22fe7-d92c-4cfa-b645-fc641223c320 for instance with vm_state building and task_state spawning. [ 555.704346] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891907, 'name': CreateVM_Task, 'duration_secs': 0.406775} completed successfully. 
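
The event exchange above is worth unpacking: Neutron reports network-vif-plugged for port f8f22fe7-d92c-4cfa-b645-fc641223c320, the compute manager takes the per-instance "-events" lock to pop a registered waiter, finds none ("No waiting events found"), and therefore logs the unexpected-event WARNING — the spawn path for this instance had not yet registered interest in that event. A stdlib sketch of that prepare/pop handshake:

import threading
from collections import defaultdict

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()       # the "-events" lock in the log
        self._waiters = defaultdict(dict)   # instance uuid -> {event: Event}

    def prepare(self, instance, event):
        # The spawning thread registers interest before plugging the VIF.
        with self._lock:
            waiter = threading.Event()
            self._waiters[instance][event] = waiter
            return waiter

    def pop(self, instance, event):
        # The external-event callback pops the waiter; None means nobody
        # was waiting for this event.
        with self._lock:
            return self._waiters.get(instance, {}).pop(event, None)

events = InstanceEvents()
if events.pop("495acb0f-5b45-457e-9218-db57a3612289",
              "network-vif-plugged-f8f22fe7-d92c-4cfa-b645-fc641223c320") is None:
    print("No waiting events found; unexpected event")  # cf. the WARNING
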
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.704346] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53921884-f617-4020-b779-650becf89a58] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 555.704700] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.704895] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.705243] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 555.705529] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9f1a4e7-da64-440d-8eb9-7833c475c1bb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.712956] env[61962]: DEBUG oslo_vmware.api [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Waiting for the task: (returnval){ [ 555.712956] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5216dcc7-33c3-52fb-1580-4bc375c5e7c0" [ 555.712956] env[61962]: _type = "Task" [ 555.712956] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.724523] env[61962]: DEBUG oslo_vmware.api [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5216dcc7-33c3-52fb-1580-4bc375c5e7c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.228596] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.228596] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 556.229022] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.256286] env[61962]: DEBUG nova.network.neutron [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Updating instance_info_cache with network_info: [{"id": "d0601b5c-744b-4423-8d06-01097a3c2e82", "address": "fa:16:3e:23:5f:57", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0601b5c-74", "ovs_interfaceid": "d0601b5c-744b-4423-8d06-01097a3c2e82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.273888] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Releasing lock "refresh_cache-dc6c9cb6-419f-4122-8351-0c517fb8616d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.274222] env[61962]: DEBUG nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: 
dc6c9cb6-419f-4122-8351-0c517fb8616d] Instance network_info: |[{"id": "d0601b5c-744b-4423-8d06-01097a3c2e82", "address": "fa:16:3e:23:5f:57", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0601b5c-74", "ovs_interfaceid": "d0601b5c-744b-4423-8d06-01097a3c2e82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 556.274991] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:5f:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a69ed1dd-213a-4e30-992a-466735188bf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0601b5c-744b-4423-8d06-01097a3c2e82', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 556.286326] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Creating folder: Project (f1aff07cb3e74298b4e347bd4176fbf8). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 556.287164] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0bc235b-d4d8-4a73-99b1-a2fcdc8e3fae {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.300616] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Created folder: Project (f1aff07cb3e74298b4e347bd4176fbf8) in parent group-v953327. [ 556.300616] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Creating folder: Instances. Parent ref: group-v953337. 
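
Each project gets the same two-step folder layout seen here and in the earlier builds: "Project (<project id>)" created under the root group-v953327, then an "Instances" folder beneath it, and the new VM lands in the latter. A toy version of that tree; making create_folder idempotent is a simplification for the sketch (the real Folder.CreateFolder call fails on an existing name, which the caller then resolves by lookup):

class Folder:
    # Minimal inventory-folder node.
    def __init__(self, name):
        self.name = name
        self.children = {}

    def create_folder(self, name):
        # Return the existing child instead of failing on duplicates.
        return self.children.setdefault(name, Folder(name))

root = Folder("group-v953327")
project = root.create_folder("Project (f1aff07cb3e74298b4e347bd4176fbf8)")
instances = project.create_folder("Instances")
print(project.name, "->", instances.name)
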
{{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 556.300616] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-632f09be-0327-4fb1-a9ac-bf7bb2ec7b06 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.312189] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Created folder: Instances in parent group-v953337. [ 556.312450] env[61962]: DEBUG oslo.service.loopingcall [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.312649] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 556.313075] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f73fbee4-6fdb-4a26-b591-785185dc47a7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.338641] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 556.338641] env[61962]: value = "task-4891910" [ 556.338641] env[61962]: _type = "Task" [ 556.338641] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.349543] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891910, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.854230] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891910, 'name': CreateVM_Task, 'duration_secs': 0.380384} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.854230] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 556.854690] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.854869] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.855461] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 556.855755] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a6f249d-ba33-4843-9146-36f626277b26 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.863295] env[61962]: DEBUG oslo_vmware.api [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Waiting for the task: (returnval){ [ 556.863295] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52b8b493-9722-61bd-aa0b-9af7b6481f63" [ 556.863295] env[61962]: _type = "Task" [ 556.863295] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.875019] env[61962]: DEBUG oslo_vmware.api [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52b8b493-9722-61bd-aa0b-9af7b6481f63, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.178204] env[61962]: DEBUG nova.network.neutron [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Successfully updated port: e176502f-775b-4502-891f-1388d15e7655 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 557.225112] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquiring lock "refresh_cache-6ba9719c-3689-4db6-8ab2-441dc52c7bfd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.225280] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquired lock "refresh_cache-6ba9719c-3689-4db6-8ab2-441dc52c7bfd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.225330] env[61962]: DEBUG nova.network.neutron [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 557.379210] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.379465] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.379749] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.390175] env[61962]: DEBUG nova.network.neutron [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.182617] env[61962]: DEBUG nova.network.neutron [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Updating instance_info_cache with network_info: [{"id": "e176502f-775b-4502-891f-1388d15e7655", "address": "fa:16:3e:84:74:ea", "network": {"id": "f2b1b8ad-ff70-4ff0-ada9-139452e1a7cb", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1241075281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1dd984fe1bf84471ae103d28565d160b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape176502f-77", "ovs_interfaceid": "e176502f-775b-4502-891f-1388d15e7655", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.208756] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Releasing lock "refresh_cache-6ba9719c-3689-4db6-8ab2-441dc52c7bfd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.209089] env[61962]: DEBUG nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Instance network_info: |[{"id": "e176502f-775b-4502-891f-1388d15e7655", "address": "fa:16:3e:84:74:ea", "network": {"id": "f2b1b8ad-ff70-4ff0-ada9-139452e1a7cb", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1241075281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1dd984fe1bf84471ae103d28565d160b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape176502f-77", "ovs_interfaceid": "e176502f-775b-4502-891f-1388d15e7655", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 558.209543] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:74:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea4a9e02-45f1-4afb-8abb-0de26b153086', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e176502f-775b-4502-891f-1388d15e7655', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 558.221594] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Creating folder: Project (1dd984fe1bf84471ae103d28565d160b). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 558.222246] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed3735d2-2901-46e2-959f-fe5c14b97507 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.236749] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Created folder: Project (1dd984fe1bf84471ae103d28565d160b) in parent group-v953327. [ 558.236749] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Creating folder: Instances. Parent ref: group-v953340. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 558.236865] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30563fc4-442b-40c3-8297-37edb77bf83a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.247899] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Created folder: Instances in parent group-v953340. [ 558.248589] env[61962]: DEBUG oslo.service.loopingcall [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 558.249245] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 558.249245] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13f304f0-6dec-4571-903a-5c28b0226a0e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.275467] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 558.275467] env[61962]: value = "task-4891913" [ 558.275467] env[61962]: _type = "Task" [ 558.275467] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.285303] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891913, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.299382] env[61962]: DEBUG nova.compute.manager [req-96c9c9a2-1762-40aa-abd0-67a9b9aea713 req-c002cded-0f51-4122-82ae-c1373875a938 service nova] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Received event network-vif-plugged-e176502f-775b-4502-891f-1388d15e7655 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 558.299382] env[61962]: DEBUG oslo_concurrency.lockutils [req-96c9c9a2-1762-40aa-abd0-67a9b9aea713 req-c002cded-0f51-4122-82ae-c1373875a938 service nova] Acquiring lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.299382] env[61962]: DEBUG oslo_concurrency.lockutils [req-96c9c9a2-1762-40aa-abd0-67a9b9aea713 req-c002cded-0f51-4122-82ae-c1373875a938 service nova] Lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.299773] env[61962]: DEBUG oslo_concurrency.lockutils [req-96c9c9a2-1762-40aa-abd0-67a9b9aea713 req-c002cded-0f51-4122-82ae-c1373875a938 service nova] Lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.299773] env[61962]: DEBUG nova.compute.manager [req-96c9c9a2-1762-40aa-abd0-67a9b9aea713 req-c002cded-0f51-4122-82ae-c1373875a938 service nova] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] No waiting events found dispatching network-vif-plugged-e176502f-775b-4502-891f-1388d15e7655 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 558.299845] env[61962]: WARNING nova.compute.manager [req-96c9c9a2-1762-40aa-abd0-67a9b9aea713 req-c002cded-0f51-4122-82ae-c1373875a938 service nova] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Received unexpected event network-vif-plugged-e176502f-775b-4502-891f-1388d15e7655 for instance with vm_state building and task_state spawning. 
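The entries above show the driver's recurring task lifecycle: a Folder.CreateVM_Task invocation returns a task reference, and wait_for_task/_poll_task then poll that reference ("progress is 0%") until the "completed successfully" record appears. A minimal sketch of that polling loop follows; `get_task_state` and the poll interval are hypothetical stand-ins for the vSphere property reads the log records, not the real oslo.vmware API.

    # Illustrative sketch of the wait_for_task/_poll_task pattern seen above:
    # submit a vCenter task, then poll its state until it reaches a terminal
    # state. `get_task_state` is a hypothetical callable returning
    # (state, progress, error) for a task ref; it is an assumption, not the
    # actual oslo.vmware interface.
    import time

    POLL_INTERVAL = 0.5  # assumed seconds between polls


    def wait_for_task(task_ref, get_task_state, timeout=300.0):
        """Poll a task reference until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = get_task_state(task_ref)
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"task {task_ref} failed: {error}")
            # 'queued' or 'running': report progress and keep waiting,
            # mirroring the "progress is 0%" lines in the trace.
            print(f"Task {task_ref} progress is {progress}%")
            time.sleep(POLL_INTERVAL)
        raise TimeoutError(f"task {task_ref} did not complete in {timeout}s")

In this trace each CreateVM_Task finishes in well under a second (duration_secs around 0.36-0.38), so such a loop would typically exit after one or two polls.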
[ 558.348793] env[61962]: DEBUG nova.network.neutron [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Successfully updated port: 0816a8d0-7081-4e73-b7dd-b432b1dd003f {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 558.362636] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquiring lock "refresh_cache-1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.363100] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquired lock "refresh_cache-1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.363366] env[61962]: DEBUG nova.network.neutron [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 558.503834] env[61962]: DEBUG nova.network.neutron [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.561040] env[61962]: DEBUG nova.network.neutron [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Successfully updated port: a3da001e-9ef5-4b16-ac0f-1196d6253f2a {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 558.580221] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "refresh_cache-d0df932e-822b-4c60-a588-445aa2f14cdb" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.580382] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "refresh_cache-d0df932e-822b-4c60-a588-445aa2f14cdb" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.580530] env[61962]: DEBUG nova.network.neutron [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 558.681258] env[61962]: DEBUG nova.network.neutron [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.789633] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891913, 'name': CreateVM_Task, 'duration_secs': 0.360781} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.792379] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 558.792379] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.792379] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.792379] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 558.792379] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-160260a8-d974-4695-97b3-b3f7e7b91c01 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.799698] env[61962]: DEBUG oslo_vmware.api [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Waiting for the task: (returnval){ [ 558.799698] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]527e8438-eda7-fa53-c154-d5b2036274f7" [ 558.799698] env[61962]: _type = "Task" [ 558.799698] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.814154] env[61962]: DEBUG oslo_vmware.api [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]527e8438-eda7-fa53-c154-d5b2036274f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.089987] env[61962]: DEBUG nova.network.neutron [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Updating instance_info_cache with network_info: [{"id": "0816a8d0-7081-4e73-b7dd-b432b1dd003f", "address": "fa:16:3e:b0:5a:a5", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0816a8d0-70", "ovs_interfaceid": "0816a8d0-7081-4e73-b7dd-b432b1dd003f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.103731] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Releasing lock "refresh_cache-1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.103731] env[61962]: DEBUG nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Instance network_info: |[{"id": "0816a8d0-7081-4e73-b7dd-b432b1dd003f", "address": "fa:16:3e:b0:5a:a5", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0816a8d0-70", "ovs_interfaceid": "0816a8d0-7081-4e73-b7dd-b432b1dd003f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 559.103887] env[61962]: DEBUG 
nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:5a:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a69ed1dd-213a-4e30-992a-466735188bf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0816a8d0-7081-4e73-b7dd-b432b1dd003f', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 559.115490] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Creating folder: Project (13cf9ead87c749fc963fbe35b22b4fd6). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 559.116215] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-307193b8-a104-4143-ab82-c74c9ecfbe87 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.130119] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Created folder: Project (13cf9ead87c749fc963fbe35b22b4fd6) in parent group-v953327. [ 559.130119] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Creating folder: Instances. Parent ref: group-v953343. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 559.130499] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b417b8b3-ee10-4e59-bfee-248204ca72d7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.140966] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Created folder: Instances in parent group-v953343. [ 559.141258] env[61962]: DEBUG oslo.service.loopingcall [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 559.141460] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 559.141942] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcc13f0d-1c71-4e9b-a0c6-33a6ec93bc98 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.168308] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.168698] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 559.168698] env[61962]: value = "task-4891916" [ 559.168698] env[61962]: _type = "Task" [ 559.168698] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.168931] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.169120] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 559.169430] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 559.181898] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891916, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.204727] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 559.205019] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53921884-f617-4020-b779-650becf89a58] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 559.205213] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 559.205359] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 559.205940] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 559.205940] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 559.205940] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 559.206202] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 559.207239] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.207239] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.207629] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.207980] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.208251] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.208515] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.208712] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 559.209197] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.237022] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.237022] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.237022] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.237022] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 559.238306] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e73341-fca5-49b0-b482-86d2721b3471 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.251158] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344390d5-32cb-4be1-b4ad-7ebab722b2f4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.271744] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5b3ef0-5b0b-498e-8b71-c150a845f08f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.280036] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4700e9ad-c6f6-4dd2-a91c-7accb5d1b022 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.317047] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180582MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 559.317221] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.318177] 
env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.329756] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.330026] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 559.330260] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.413999] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 495acb0f-5b45-457e-9218-db57a3612289 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 559.414187] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance dc6c9cb6-419f-4122-8351-0c517fb8616d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 559.414486] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53921884-f617-4020-b779-650becf89a58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 559.414486] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 06028841-b941-4352-b5c1-fa71cc87a4a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 559.414631] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6ba9719c-3689-4db6-8ab2-441dc52c7bfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 559.414897] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 559.414985] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d0df932e-822b-4c60-a588-445aa2f14cdb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 559.415254] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 559.415324] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '7', 'num_vm_building': '7', 'num_task_spawning': '7', 'num_os_type_None': '7', 'num_proj_5387c68fd60c4139a463bca1d3255da0': '1', 'io_workload': '7', 'num_proj_c7a5eef5ba8d4ba88356ffba87aec75c': '1', 'num_proj_30841c5d09234932ad2f2ee679f7ee5d': '1', 'num_proj_f1aff07cb3e74298b4e347bd4176fbf8': '1', 'num_proj_1dd984fe1bf84471ae103d28565d160b': '1', 'num_proj_13cf9ead87c749fc963fbe35b22b4fd6': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 559.484235] env[61962]: DEBUG nova.network.neutron [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Updating instance_info_cache with network_info: [{"id": "a3da001e-9ef5-4b16-ac0f-1196d6253f2a", "address": "fa:16:3e:51:1b:04", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3da001e-9e", "ovs_interfaceid": "a3da001e-9ef5-4b16-ac0f-1196d6253f2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.508420] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "refresh_cache-d0df932e-822b-4c60-a588-445aa2f14cdb" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.508420] env[61962]: DEBUG nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Instance network_info: |[{"id": "a3da001e-9ef5-4b16-ac0f-1196d6253f2a", "address": "fa:16:3e:51:1b:04", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3da001e-9e", "ovs_interfaceid": "a3da001e-9ef5-4b16-ac0f-1196d6253f2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 559.508787] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:1b:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be8bd197-4b2b-46e7-88ea-2554b0438584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3da001e-9ef5-4b16-ac0f-1196d6253f2a', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 559.520164] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating folder: Project (e58b68272c404983993a09ad12e8c03b). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 559.521036] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05d20d69-4478-47ee-a3bc-6043ab19c134 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.545640] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Created folder: Project (e58b68272c404983993a09ad12e8c03b) in parent group-v953327. 
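The folder-creation entries in this trace (at 556.3, 558.2, 559.1, and here) all follow the same two-level layout: a per-tenant "Project (<tenant_id>)" folder under the root group, then an "Instances" folder inside it, with pre-existing folders tolerated. A minimal sketch under those assumptions; `create_folder(parent_ref, name)` is a hypothetical stand-in for the Folder.CreateFolder invocation and is assumed to return the new folder's ref, or the existing ref when the name is already taken.

    # Illustrative sketch of the Project/Instances folder layout the log
    # records. `create_folder` is a hypothetical helper wrapping the
    # Folder.CreateFolder SOAP call; returning the existing ref on a name
    # collision is an assumption modelled on the driver tolerating
    # already-created folders.
    def ensure_instance_folder(create_folder, root_folder_ref, tenant_id):
        """Return the Instances folder ref for a tenant, creating it if needed."""
        project_ref = create_folder(root_folder_ref, f"Project ({tenant_id})")
        return create_folder(project_ref, "Instances")

Applied to the entry just above, tenant_id would be e58b68272c404983993a09ad12e8c03b and root_folder_ref the group-v953327 parent.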
[ 559.545886] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating folder: Instances. Parent ref: group-v953346. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 559.546173] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d36fa203-6b8e-4f57-9052-c532e7bf96e6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.562858] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Created folder: Instances in parent group-v953346. [ 559.563072] env[61962]: DEBUG oslo.service.loopingcall [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 559.566449] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 559.568068] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0164f516-662a-4fea-9ead-b337e20a3c7b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.594641] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 559.594641] env[61962]: value = "task-4891919" [ 559.594641] env[61962]: _type = "Task" [ 559.594641] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.607186] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d234e072-760b-4732-9d82-6dc76d443bec {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.609935] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891919, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.615163] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd19474-0bcf-4360-b9db-01f0a3e4afe9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.665165] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d8f24e-a493-4441-942f-12f8121a250c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.680715] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abccf141-ca06-4fa5-84eb-103cfb8fbca8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.706631] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.707918] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891916, 'name': CreateVM_Task, 'duration_secs': 0.365545} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.708403] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 559.709188] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.709351] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.710123] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 559.710123] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9a4d2a5-ec6f-4f54-b955-4ef969a45317 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.718161] env[61962]: DEBUG oslo_vmware.api [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Waiting for the task: (returnval){ [ 559.718161] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]521bc777-7f40-75e3-b2c6-fce7762e812f" [ 559.718161] env[61962]: _type = "Task" [ 
559.718161] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.719945] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 559.734143] env[61962]: DEBUG oslo_vmware.api [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]521bc777-7f40-75e3-b2c6-fce7762e812f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.742566] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 559.742784] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.425s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.065922] env[61962]: DEBUG nova.compute.manager [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Received event network-changed-f8f22fe7-d92c-4cfa-b645-fc641223c320 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 560.066207] env[61962]: DEBUG nova.compute.manager [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Refreshing instance network info cache due to event network-changed-f8f22fe7-d92c-4cfa-b645-fc641223c320. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 560.066302] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Acquiring lock "refresh_cache-495acb0f-5b45-457e-9218-db57a3612289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.066441] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Acquired lock "refresh_cache-495acb0f-5b45-457e-9218-db57a3612289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.066853] env[61962]: DEBUG nova.network.neutron [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Refreshing network info cache for port f8f22fe7-d92c-4cfa-b645-fc641223c320 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 560.109022] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891919, 'name': CreateVM_Task, 'duration_secs': 0.367848} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.109022] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 560.109022] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.233352] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.233614] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 560.233857] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.234277] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.234365] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 560.234595] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8192217-ea77-4211-a038-52f1e92ca9f0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.240110] env[61962]: DEBUG oslo_vmware.api [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 560.240110] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52f19cda-84dd-1060-8850-51b934f48595" [ 560.240110] env[61962]: _type = "Task" [ 560.240110] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.249052] env[61962]: DEBUG oslo_vmware.api [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52f19cda-84dd-1060-8850-51b934f48595, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.759617] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.759863] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 560.760098] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.160944] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquiring lock "a6e6f30d-c667-4f70-bf60-596a8fe97188" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.161237] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock
"a6e6f30d-c667-4f70-bf60-596a8fe97188" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.185716] env[61962]: DEBUG nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 561.221032] env[61962]: DEBUG nova.network.neutron [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Updated VIF entry in instance network info cache for port f8f22fe7-d92c-4cfa-b645-fc641223c320. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 561.221032] env[61962]: DEBUG nova.network.neutron [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Updating instance_info_cache with network_info: [{"id": "f8f22fe7-d92c-4cfa-b645-fc641223c320", "address": "fa:16:3e:09:0b:4a", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.103", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8f22fe7-d9", "ovs_interfaceid": "f8f22fe7-d92c-4cfa-b645-fc641223c320", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.234706] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Releasing lock "refresh_cache-495acb0f-5b45-457e-9218-db57a3612289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.235278] env[61962]: DEBUG nova.compute.manager [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 53921884-f617-4020-b779-650becf89a58] Received event network-vif-plugged-e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 561.235608] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Acquiring lock "53921884-f617-4020-b779-650becf89a58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.235667] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Lock "53921884-f617-4020-b779-650becf89a58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.236077] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Lock "53921884-f617-4020-b779-650becf89a58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.236693] env[61962]: DEBUG nova.compute.manager [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 53921884-f617-4020-b779-650becf89a58] No waiting events found dispatching network-vif-plugged-e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 561.236693] env[61962]: WARNING nova.compute.manager [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 53921884-f617-4020-b779-650becf89a58] Received unexpected event network-vif-plugged-e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc for instance with vm_state building and task_state spawning. [ 561.236785] env[61962]: DEBUG nova.compute.manager [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 53921884-f617-4020-b779-650becf89a58] Received event network-changed-e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 561.238116] env[61962]: DEBUG nova.compute.manager [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 53921884-f617-4020-b779-650becf89a58] Refreshing instance network info cache due to event network-changed-e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc.
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 561.238116] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Acquiring lock "refresh_cache-53921884-f617-4020-b779-650becf89a58" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.238116] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Acquired lock "refresh_cache-53921884-f617-4020-b779-650becf89a58" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.238116] env[61962]: DEBUG nova.network.neutron [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 53921884-f617-4020-b779-650becf89a58] Refreshing network info cache for port e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 561.305012] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.305314] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.308220] env[61962]: INFO nova.compute.claims [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 561.607721] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f336a47-d7d6-4e44-8cd5-f89b96a60241 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.619579] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ceb963-07de-4804-8d01-b2d3f4f07c14 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.663452] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638ec4b5-55e5-4c4e-b8e3-e2cc8ffb6423 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.672872] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361ee12a-1936-4142-af8d-fadbc7188329 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.692201] env[61962]: DEBUG nova.compute.provider_tree [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Inventory has not changed in ProviderTree for 
provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.703936] env[61962]: DEBUG nova.scheduler.client.report [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 561.735253] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.429s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.735253] env[61962]: DEBUG nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 561.795345] env[61962]: DEBUG nova.compute.utils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 561.796700] env[61962]: DEBUG nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 561.796877] env[61962]: DEBUG nova.network.neutron [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 561.812125] env[61962]: DEBUG nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 561.896196] env[61962]: DEBUG nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 561.946642] env[61962]: DEBUG nova.virt.hardware [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=<?>,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-01T12:09:08Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 561.946877] env[61962]: DEBUG nova.virt.hardware [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 561.947044] env[61962]: DEBUG nova.virt.hardware [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 561.947225] env[61962]: DEBUG nova.virt.hardware [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 561.947365] env[61962]: DEBUG nova.virt.hardware [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 561.947505] env[61962]: DEBUG nova.virt.hardware [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 561.947710] env[61962]: DEBUG nova.virt.hardware [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 561.947861] env[61962]: DEBUG nova.virt.hardware [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 561.950352] env[61962]: DEBUG nova.virt.hardware [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953
tempest-MigrationsAdminTest-5656953-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 561.951162] env[61962]: DEBUG nova.virt.hardware [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 561.951463] env[61962]: DEBUG nova.virt.hardware [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 561.953817] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e1d5db-30d2-4290-9ba4-b9e421059ff3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.968625] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57dc40c-a798-4530-bce3-b9fb422a123f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.017413] env[61962]: DEBUG nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Received event network-changed-e176502f-775b-4502-891f-1388d15e7655 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.018290] env[61962]: DEBUG nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Refreshing instance network info cache due to event network-changed-e176502f-775b-4502-891f-1388d15e7655. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 562.018534] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Acquiring lock "refresh_cache-6ba9719c-3689-4db6-8ab2-441dc52c7bfd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.018681] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Acquired lock "refresh_cache-6ba9719c-3689-4db6-8ab2-441dc52c7bfd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.018896] env[61962]: DEBUG nova.network.neutron [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Refreshing network info cache for port e176502f-775b-4502-891f-1388d15e7655 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 562.116212] env[61962]: DEBUG nova.policy [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2857b6d47a246a3abff6425bf6e5866', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e7e747becdf4976b570b79d4b20d88b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 562.613828] env[61962]: DEBUG nova.network.neutron [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 53921884-f617-4020-b779-650becf89a58] Updated VIF entry in instance network info cache for port e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 562.615939] env[61962]: DEBUG nova.network.neutron [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: 53921884-f617-4020-b779-650becf89a58] Updating instance_info_cache with network_info: [{"id": "e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc", "address": "fa:16:3e:d3:ed:c6", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8b63a66-f8", "ovs_interfaceid": "e8b63a66-f8b4-4d6d-9a01-6238f6cebbcc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.631973] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Releasing lock "refresh_cache-53921884-f617-4020-b779-650becf89a58" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.631973] env[61962]: DEBUG nova.compute.manager [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Received event network-vif-plugged-d0601b5c-744b-4423-8d06-01097a3c2e82 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.631973] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Acquiring lock "dc6c9cb6-419f-4122-8351-0c517fb8616d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.631973] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Lock "dc6c9cb6-419f-4122-8351-0c517fb8616d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.632364] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Lock "dc6c9cb6-419f-4122-8351-0c517fb8616d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.632364] env[61962]: DEBUG nova.compute.manager
[req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] No waiting events found dispatching network-vif-plugged-d0601b5c-744b-4423-8d06-01097a3c2e82 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 562.632364] env[61962]: WARNING nova.compute.manager [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Received unexpected event network-vif-plugged-d0601b5c-744b-4423-8d06-01097a3c2e82 for instance with vm_state building and task_state spawning. [ 562.632364] env[61962]: DEBUG nova.compute.manager [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Received event network-changed-d0601b5c-744b-4423-8d06-01097a3c2e82 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.632605] env[61962]: DEBUG nova.compute.manager [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Refreshing instance network info cache due to event network-changed-d0601b5c-744b-4423-8d06-01097a3c2e82. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 562.632605] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Acquiring lock "refresh_cache-dc6c9cb6-419f-4122-8351-0c517fb8616d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.632605] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Acquired lock "refresh_cache-dc6c9cb6-419f-4122-8351-0c517fb8616d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.633136] env[61962]: DEBUG nova.network.neutron [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Refreshing network info cache for port d0601b5c-744b-4423-8d06-01097a3c2e82 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 563.791041] env[61962]: DEBUG nova.network.neutron [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Updated VIF entry in instance network info cache for port e176502f-775b-4502-891f-1388d15e7655. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 563.791450] env[61962]: DEBUG nova.network.neutron [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Updating instance_info_cache with network_info: [{"id": "e176502f-775b-4502-891f-1388d15e7655", "address": "fa:16:3e:84:74:ea", "network": {"id": "f2b1b8ad-ff70-4ff0-ada9-139452e1a7cb", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1241075281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1dd984fe1bf84471ae103d28565d160b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape176502f-77", "ovs_interfaceid": "e176502f-775b-4502-891f-1388d15e7655", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.806119] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Releasing lock "refresh_cache-6ba9719c-3689-4db6-8ab2-441dc52c7bfd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.806375] env[61962]: DEBUG nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Received event network-vif-plugged-0816a8d0-7081-4e73-b7dd-b432b1dd003f {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 563.806554] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Acquiring lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.806746] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.806898] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.807072] env[61962]: DEBUG
nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] No waiting events found dispatching network-vif-plugged-0816a8d0-7081-4e73-b7dd-b432b1dd003f {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 563.810020] env[61962]: WARNING nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Received unexpected event network-vif-plugged-0816a8d0-7081-4e73-b7dd-b432b1dd003f for instance with vm_state building and task_state spawning. [ 563.810020] env[61962]: DEBUG nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Received event network-vif-plugged-a3da001e-9ef5-4b16-ac0f-1196d6253f2a {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 563.810020] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Acquiring lock "d0df932e-822b-4c60-a588-445aa2f14cdb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.810020] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Lock "d0df932e-822b-4c60-a588-445aa2f14cdb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.810287] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Lock "d0df932e-822b-4c60-a588-445aa2f14cdb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.810287] env[61962]: DEBUG nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] No waiting events found dispatching network-vif-plugged-a3da001e-9ef5-4b16-ac0f-1196d6253f2a {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 563.810287] env[61962]: WARNING nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Received unexpected event network-vif-plugged-a3da001e-9ef5-4b16-ac0f-1196d6253f2a for instance with vm_state building and task_state spawning.
[ 563.810287] env[61962]: DEBUG nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Received event network-changed-0816a8d0-7081-4e73-b7dd-b432b1dd003f {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 563.810435] env[61962]: DEBUG nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Refreshing instance network info cache due to event network-changed-0816a8d0-7081-4e73-b7dd-b432b1dd003f. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 563.810435] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Acquiring lock "refresh_cache-1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.810435] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Acquired lock "refresh_cache-1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.812297] env[61962]: DEBUG nova.network.neutron [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Refreshing network info cache for port 0816a8d0-7081-4e73-b7dd-b432b1dd003f {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 564.660645] env[61962]: DEBUG nova.network.neutron [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Updated VIF entry in instance network info cache for port d0601b5c-744b-4423-8d06-01097a3c2e82. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 564.661045] env[61962]: DEBUG nova.network.neutron [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Updating instance_info_cache with network_info: [{"id": "d0601b5c-744b-4423-8d06-01097a3c2e82", "address": "fa:16:3e:23:5f:57", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0601b5c-74", "ovs_interfaceid": "d0601b5c-744b-4423-8d06-01097a3c2e82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.676269] env[61962]: DEBUG oslo_concurrency.lockutils [req-58a33329-dfe2-435e-af46-353e63f25571 req-749f17a8-fce1-48a2-a653-58765ef7e01a service nova] Releasing lock "refresh_cache-dc6c9cb6-419f-4122-8351-0c517fb8616d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.055345] env[61962]: DEBUG nova.network.neutron [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Successfully created port: c6449b59-aa15-425f-ab70-b701b0cf4248 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 565.920715] env[61962]: DEBUG nova.network.neutron [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Updated VIF entry in instance network info cache for port 0816a8d0-7081-4e73-b7dd-b432b1dd003f. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 565.921085] env[61962]: DEBUG nova.network.neutron [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Updating instance_info_cache with network_info: [{"id": "0816a8d0-7081-4e73-b7dd-b432b1dd003f", "address": "fa:16:3e:b0:5a:a5", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0816a8d0-70", "ovs_interfaceid": "0816a8d0-7081-4e73-b7dd-b432b1dd003f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.934515] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Releasing lock "refresh_cache-1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.934662] env[61962]: DEBUG nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Received event network-changed-a3da001e-9ef5-4b16-ac0f-1196d6253f2a {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 565.934765] env[61962]: DEBUG nova.compute.manager [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Refreshing instance network info cache due to event network-changed-a3da001e-9ef5-4b16-ac0f-1196d6253f2a. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 565.934968] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Acquiring lock "refresh_cache-d0df932e-822b-4c60-a588-445aa2f14cdb" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.935180] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Acquired lock "refresh_cache-d0df932e-822b-4c60-a588-445aa2f14cdb" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.935343] env[61962]: DEBUG nova.network.neutron [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Refreshing network info cache for port a3da001e-9ef5-4b16-ac0f-1196d6253f2a {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 566.773420] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquiring lock "b9782756-27e1-46fc-940f-069cfb9ab9f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.773420] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Lock "b9782756-27e1-46fc-940f-069cfb9ab9f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.793689] env[61962]: DEBUG nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Starting instance...
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 566.868505] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.868758] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.870769] env[61962]: INFO nova.compute.claims [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 567.089114] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9eeda7e-4ec8-41f4-94a9-94661b876275 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.101201] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aedf605-6ce1-4dd5-9b60-8a8496aedc67 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.140023] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e2a396-1eee-477d-b999-2e645f0230fa {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.146109] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3dda80-51ed-4552-aad0-f8176c201004 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.163221] env[61962]: DEBUG nova.compute.provider_tree [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 567.181654] env[61962]: DEBUG nova.scheduler.client.report [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 567.203439] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 
tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.334s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.205237] env[61962]: DEBUG nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 567.271931] env[61962]: DEBUG nova.compute.utils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.271931] env[61962]: DEBUG nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Not allocating networking since 'none' was specified. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 567.292665] env[61962]: DEBUG nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 567.398658] env[61962]: DEBUG nova.network.neutron [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Updated VIF entry in instance network info cache for port a3da001e-9ef5-4b16-ac0f-1196d6253f2a. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 567.398998] env[61962]: DEBUG nova.network.neutron [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Updating instance_info_cache with network_info: [{"id": "a3da001e-9ef5-4b16-ac0f-1196d6253f2a", "address": "fa:16:3e:51:1b:04", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3da001e-9e", "ovs_interfaceid": "a3da001e-9ef5-4b16-ac0f-1196d6253f2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.417240] env[61962]: DEBUG oslo_concurrency.lockutils [req-a48bfa77-81a6-41c6-8044-96cd9ff6bba7 req-9fc04213-0b14-4422-bb21-765d858f0f8a service nova] Releasing lock "refresh_cache-d0df932e-822b-4c60-a588-445aa2f14cdb" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.423695] env[61962]: DEBUG nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 567.469143] env[61962]: DEBUG nova.virt.hardware [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=<?>,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-01T12:09:08Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 567.469143] env[61962]: DEBUG nova.virt.hardware [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 567.469143] env[61962]: DEBUG nova.virt.hardware [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.469344] env[61962]: DEBUG nova.virt.hardware [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 567.469344] env[61962]: DEBUG nova.virt.hardware [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.469344] env[61962]: DEBUG nova.virt.hardware [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 567.469615] env[61962]: DEBUG nova.virt.hardware [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 567.469823] env[61962]: DEBUG nova.virt.hardware [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 567.470040] env[61962]: DEBUG nova.virt.hardware [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94
tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 567.470318] env[61962]: DEBUG nova.virt.hardware [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 567.470376] env[61962]: DEBUG nova.virt.hardware [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 567.471302] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de93da39-c954-4c5e-a58f-b6017156ad33 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.484307] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b284ab-049b-471f-aa0c-01e895e2c029 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.507120] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Instance VIF info [] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 567.514358] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Creating folder: Project (a7b748ae1e6d4e17bc463811008c5596). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 567.514358] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a43b0a47-4058-4ca7-8c4a-4aef930b2267 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.527784] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Created folder: Project (a7b748ae1e6d4e17bc463811008c5596) in parent group-v953327. [ 567.528046] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Creating folder: Instances. Parent ref: group-v953349. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 567.528300] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2be716f-a2e3-4311-b975-b2ee714105b3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.538634] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Created folder: Instances in parent group-v953349. 
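The nova.virt.hardware DEBUG lines above trace the CPU-topology search for the 1-vCPU m1.nano flavor: with no flavor or image constraints the preferred topology defaults to 0:0:0 and the maximums to 65536 sockets/cores/threads, and the only triple whose product equals one vCPU is 1:1:1. Below is a minimal re-implementation of that enumeration under the defaults shown in the log; an illustrative sketch, not Nova's actual hardware.py code.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Yield every (sockets, cores, threads) triple whose product equals the
    # vCPU count while respecting the per-dimension maximums from the log.
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                yield (sockets, cores, threads)

print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"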
[ 567.538851] env[61962]: DEBUG oslo.service.loopingcall [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 567.539140] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 567.539633] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-326cb934-3390-46fc-a129-37ebd44b2f15 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.562215] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 567.562215] env[61962]: value = "task-4891922" [ 567.562215] env[61962]: _type = "Task" [ 567.562215] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.574851] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891922, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.072430] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891922, 'name': CreateVM_Task, 'duration_secs': 0.336418} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.074493] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 568.074493] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.074493] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.074493] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 568.074493] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-064fc1ef-952c-49c4-8fe4-507db39ab45e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.080742] env[61962]: DEBUG oslo_vmware.api [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Waiting for the task: (returnval){ [ 
568.080742] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5219635f-b996-f002-c665-50a780809af6" [ 568.080742] env[61962]: _type = "Task" [ 568.080742] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.089569] env[61962]: DEBUG oslo_vmware.api [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5219635f-b996-f002-c665-50a780809af6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.598255] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.598571] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 568.598840] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.764442] env[61962]: DEBUG nova.network.neutron [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Successfully updated port: c6449b59-aa15-425f-ab70-b701b0cf4248 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 568.776612] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquiring lock "refresh_cache-a6e6f30d-c667-4f70-bf60-596a8fe97188" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.776794] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquired lock "refresh_cache-a6e6f30d-c667-4f70-bf60-596a8fe97188" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.776977] env[61962]: DEBUG nova.network.neutron [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 568.911673] env[61962]: DEBUG nova.network.neutron [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 
tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 570.008120] env[61962]: DEBUG nova.network.neutron [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Updating instance_info_cache with network_info: [{"id": "c6449b59-aa15-425f-ab70-b701b0cf4248", "address": "fa:16:3e:fe:65:46", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6449b59-aa", "ovs_interfaceid": "c6449b59-aa15-425f-ab70-b701b0cf4248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.026881] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Releasing lock "refresh_cache-a6e6f30d-c667-4f70-bf60-596a8fe97188" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.027226] env[61962]: DEBUG nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Instance network_info: |[{"id": "c6449b59-aa15-425f-ab70-b701b0cf4248", "address": "fa:16:3e:fe:65:46", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6449b59-aa", "ovs_interfaceid": "c6449b59-aa15-425f-ab70-b701b0cf4248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 570.027649] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:65:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a69ed1dd-213a-4e30-992a-466735188bf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6449b59-aa15-425f-ab70-b701b0cf4248', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 570.038344] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Creating folder: Project (6e7e747becdf4976b570b79d4b20d88b). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 570.038938] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2f6af41-205e-4bd5-bf28-f1189b2d4a23 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.053176] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Created folder: Project (6e7e747becdf4976b570b79d4b20d88b) in parent group-v953327. [ 570.053396] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Creating folder: Instances. Parent ref: group-v953352. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 570.057473] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fcd0c185-78de-433e-bd84-3a7fdd24ed88 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.075303] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Created folder: Instances in parent group-v953352. [ 570.080476] env[61962]: DEBUG oslo.service.loopingcall [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 570.080476] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 570.080476] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e0fe3a6-4a1c-43d6-9e96-76b3cfcc7571 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.104777] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 570.104777] env[61962]: value = "task-4891925" [ 570.104777] env[61962]: _type = "Task" [ 570.104777] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.121053] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891925, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.591903] env[61962]: DEBUG nova.compute.manager [req-2103c635-5ab6-414c-9dd9-de469fdebbaa req-fdb456f1-25f7-480d-81a2-5fafb2494bbd service nova] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Received event network-vif-plugged-c6449b59-aa15-425f-ab70-b701b0cf4248 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 570.591903] env[61962]: DEBUG oslo_concurrency.lockutils [req-2103c635-5ab6-414c-9dd9-de469fdebbaa req-fdb456f1-25f7-480d-81a2-5fafb2494bbd service nova] Acquiring lock "a6e6f30d-c667-4f70-bf60-596a8fe97188-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.592236] env[61962]: DEBUG oslo_concurrency.lockutils [req-2103c635-5ab6-414c-9dd9-de469fdebbaa req-fdb456f1-25f7-480d-81a2-5fafb2494bbd service nova] Lock "a6e6f30d-c667-4f70-bf60-596a8fe97188-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.592470] env[61962]: DEBUG oslo_concurrency.lockutils [req-2103c635-5ab6-414c-9dd9-de469fdebbaa req-fdb456f1-25f7-480d-81a2-5fafb2494bbd service nova] Lock "a6e6f30d-c667-4f70-bf60-596a8fe97188-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.592779] env[61962]: DEBUG nova.compute.manager [req-2103c635-5ab6-414c-9dd9-de469fdebbaa req-fdb456f1-25f7-480d-81a2-5fafb2494bbd service nova] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] No waiting events found dispatching network-vif-plugged-c6449b59-aa15-425f-ab70-b701b0cf4248 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 570.593101] env[61962]: WARNING nova.compute.manager [req-2103c635-5ab6-414c-9dd9-de469fdebbaa req-fdb456f1-25f7-480d-81a2-5fafb2494bbd service nova] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Received unexpected event network-vif-plugged-c6449b59-aa15-425f-ab70-b701b0cf4248 for instance with vm_state building and task_state spawning. [ 570.620678] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891925, 'name': CreateVM_Task, 'duration_secs': 0.422158} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.620876] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 570.622040] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.622040] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.622175] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 570.622316] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7e87a91-2323-4f49-a6b3-eb581d8babea {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.627931] env[61962]: DEBUG oslo_vmware.api [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Waiting for the task: (returnval){ [ 570.627931] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52f71d06-c9f9-9572-3982-a8f00f491443" [ 570.627931] env[61962]: _type = "Task" [ 570.627931] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.645328] env[61962]: DEBUG oslo_vmware.api [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52f71d06-c9f9-9572-3982-a8f00f491443, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.140103] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.140467] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 571.141829] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.489073] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquiring lock "667f56c0-896f-4e25-9e8a-3954f7733dc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.489303] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Lock "667f56c0-896f-4e25-9e8a-3954f7733dc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.508114] env[61962]: DEBUG nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 574.604503] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.605058] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.606715] env[61962]: INFO nova.compute.claims [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 574.903566] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41df7bf0-1329-48e8-a5aa-6f4ce74f43d0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.912386] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8476819b-f728-48b5-85e1-05f6f86d631f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.955114] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63975706-b65f-454f-b8ff-169e3c60cbc3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.965566] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2afe20cf-4c48-4e10-931b-849be58db1eb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.978654] env[61962]: DEBUG nova.compute.provider_tree [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.995033] env[61962]: DEBUG nova.scheduler.client.report [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
575.018640] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.413s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.018809] env[61962]: DEBUG nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 575.087110] env[61962]: DEBUG nova.compute.utils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 575.091688] env[61962]: DEBUG nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 575.092021] env[61962]: DEBUG nova.network.neutron [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 575.111236] env[61962]: DEBUG nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 575.133481] env[61962]: DEBUG nova.compute.manager [req-cfd13d5f-2de5-4270-8a66-d765c2f4ff0a req-7b8b9b61-0498-4640-9787-113678c9000f service nova] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Received event network-changed-c6449b59-aa15-425f-ab70-b701b0cf4248 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 575.133719] env[61962]: DEBUG nova.compute.manager [req-cfd13d5f-2de5-4270-8a66-d765c2f4ff0a req-7b8b9b61-0498-4640-9787-113678c9000f service nova] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Refreshing instance network info cache due to event network-changed-c6449b59-aa15-425f-ab70-b701b0cf4248. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 575.133955] env[61962]: DEBUG oslo_concurrency.lockutils [req-cfd13d5f-2de5-4270-8a66-d765c2f4ff0a req-7b8b9b61-0498-4640-9787-113678c9000f service nova] Acquiring lock "refresh_cache-a6e6f30d-c667-4f70-bf60-596a8fe97188" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.136184] env[61962]: DEBUG oslo_concurrency.lockutils [req-cfd13d5f-2de5-4270-8a66-d765c2f4ff0a req-7b8b9b61-0498-4640-9787-113678c9000f service nova] Acquired lock "refresh_cache-a6e6f30d-c667-4f70-bf60-596a8fe97188" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.136473] env[61962]: DEBUG nova.network.neutron [req-cfd13d5f-2de5-4270-8a66-d765c2f4ff0a req-7b8b9b61-0498-4640-9787-113678c9000f service nova] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Refreshing network info cache for port c6449b59-aa15-425f-ab70-b701b0cf4248 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 575.207246] env[61962]: DEBUG nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 575.240951] env[61962]: DEBUG nova.virt.hardware [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 575.241171] env[61962]: DEBUG nova.virt.hardware [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 575.241325] env[61962]: DEBUG nova.virt.hardware [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 575.241679] env[61962]: DEBUG nova.virt.hardware [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
575.241679] env[61962]: DEBUG nova.virt.hardware [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 575.242047] env[61962]: DEBUG nova.virt.hardware [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 575.242825] env[61962]: DEBUG nova.virt.hardware [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 575.242825] env[61962]: DEBUG nova.virt.hardware [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 575.243049] env[61962]: DEBUG nova.virt.hardware [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 575.243523] env[61962]: DEBUG nova.virt.hardware [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 575.243829] env[61962]: DEBUG nova.virt.hardware [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 575.245633] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b400c955-3e62-4571-8651-424206826801 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.259646] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3bc43a2-15d4-4680-914e-26707e4287b3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.398947] env[61962]: DEBUG nova.policy [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10501992f6fa4ab7a50e245ad80b4c7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c00b1a76e464ad2a9788673e256de22', 
'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 577.174112] env[61962]: DEBUG nova.network.neutron [req-cfd13d5f-2de5-4270-8a66-d765c2f4ff0a req-7b8b9b61-0498-4640-9787-113678c9000f service nova] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Updated VIF entry in instance network info cache for port c6449b59-aa15-425f-ab70-b701b0cf4248. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 577.174460] env[61962]: DEBUG nova.network.neutron [req-cfd13d5f-2de5-4270-8a66-d765c2f4ff0a req-7b8b9b61-0498-4640-9787-113678c9000f service nova] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Updating instance_info_cache with network_info: [{"id": "c6449b59-aa15-425f-ab70-b701b0cf4248", "address": "fa:16:3e:fe:65:46", "network": {"id": "dce9257e-a339-459f-9ce0-b9e3ec044f23", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eef556fb5c4f49b889491ae31a496de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6449b59-aa", "ovs_interfaceid": "c6449b59-aa15-425f-ab70-b701b0cf4248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.187121] env[61962]: DEBUG oslo_concurrency.lockutils [req-cfd13d5f-2de5-4270-8a66-d765c2f4ff0a req-7b8b9b61-0498-4640-9787-113678c9000f service nova] Releasing lock "refresh_cache-a6e6f30d-c667-4f70-bf60-596a8fe97188" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.020882] env[61962]: DEBUG nova.network.neutron [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Successfully created port: 1c64dbd0-5d85-4d00-90b8-141f403f6842 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 578.706207] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquiring lock "295064f0-9677-4287-877a-ef2a33a87fef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.706639] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 
tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Lock "295064f0-9677-4287-877a-ef2a33a87fef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.600199] env[61962]: DEBUG nova.network.neutron [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Successfully updated port: 1c64dbd0-5d85-4d00-90b8-141f403f6842 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 579.617312] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquiring lock "refresh_cache-667f56c0-896f-4e25-9e8a-3954f7733dc0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.617312] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquired lock "refresh_cache-667f56c0-896f-4e25-9e8a-3954f7733dc0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.617312] env[61962]: DEBUG nova.network.neutron [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 579.730332] env[61962]: DEBUG nova.network.neutron [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 580.016714] env[61962]: DEBUG nova.network.neutron [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Updating instance_info_cache with network_info: [{"id": "1c64dbd0-5d85-4d00-90b8-141f403f6842", "address": "fa:16:3e:cb:73:32", "network": {"id": "3ec7d86e-a6c3-4b18-a68c-4e8357ab27c7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-62949195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c00b1a76e464ad2a9788673e256de22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ffea58a-e043-4ad1-9ad7-45a61fdca001", "external-id": "nsx-vlan-transportzone-662", "segmentation_id": 662, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c64dbd0-5d", "ovs_interfaceid": "1c64dbd0-5d85-4d00-90b8-141f403f6842", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.030279] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Releasing lock "refresh_cache-667f56c0-896f-4e25-9e8a-3954f7733dc0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.030591] env[61962]: DEBUG nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Instance network_info: |[{"id": "1c64dbd0-5d85-4d00-90b8-141f403f6842", "address": "fa:16:3e:cb:73:32", "network": {"id": "3ec7d86e-a6c3-4b18-a68c-4e8357ab27c7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-62949195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c00b1a76e464ad2a9788673e256de22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ffea58a-e043-4ad1-9ad7-45a61fdca001", "external-id": "nsx-vlan-transportzone-662", "segmentation_id": 662, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c64dbd0-5d", "ovs_interfaceid": "1c64dbd0-5d85-4d00-90b8-141f403f6842", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 580.031311] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:73:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ffea58a-e043-4ad1-9ad7-45a61fdca001', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c64dbd0-5d85-4d00-90b8-141f403f6842', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 580.040680] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Creating folder: Project (6c00b1a76e464ad2a9788673e256de22). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 580.041770] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f57d0ff8-ec78-4f68-a621-3e04ac194e09 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.057123] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Created folder: Project (6c00b1a76e464ad2a9788673e256de22) in parent group-v953327. [ 580.057246] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Creating folder: Instances. Parent ref: group-v953355. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 580.057495] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8c07902-594b-4354-9e60-e823a8c85148 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.067376] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Created folder: Instances in parent group-v953355. [ 580.067765] env[61962]: DEBUG oslo.service.loopingcall [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 580.069119] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 580.069119] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8795b2a-a927-4bc8-aa75-3b83960571bd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.091399] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 580.091399] env[61962]: value = "task-4891928" [ 580.091399] env[61962]: _type = "Task" [ 580.091399] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.101709] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891928, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.602454] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891928, 'name': CreateVM_Task, 'duration_secs': 0.364946} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.602454] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 580.605100] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.605321] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.605655] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 580.605934] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73d25618-e515-4470-a3e2-17290a72a45b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.611563] env[61962]: DEBUG oslo_vmware.api [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Waiting for the task: (returnval){ [ 580.611563] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52941b00-8d47-0f67-c7bd-4c1dd3a76e87" [ 580.611563] env[61962]: _type = "Task" [ 580.611563] 
env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.621773] env[61962]: DEBUG oslo_vmware.api [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52941b00-8d47-0f67-c7bd-4c1dd3a76e87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.929685] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquiring lock "6996289c-f881-4733-8943-98e3633e43ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.929992] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Lock "6996289c-f881-4733-8943-98e3633e43ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.126341] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.126637] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 581.126847] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.166760] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.170747] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.194520] env[61962]: DEBUG nova.compute.manager [req-9ced835d-469d-4083-a1f6-6a5ce5951d21 req-207b6b9f-b5f4-4ee9-b407-6bf10b913dbd service nova] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Received event network-vif-plugged-1c64dbd0-5d85-4d00-90b8-141f403f6842 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 582.194520] env[61962]: DEBUG oslo_concurrency.lockutils [req-9ced835d-469d-4083-a1f6-6a5ce5951d21 req-207b6b9f-b5f4-4ee9-b407-6bf10b913dbd service nova] Acquiring lock "667f56c0-896f-4e25-9e8a-3954f7733dc0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.194944] env[61962]: DEBUG oslo_concurrency.lockutils [req-9ced835d-469d-4083-a1f6-6a5ce5951d21 req-207b6b9f-b5f4-4ee9-b407-6bf10b913dbd service nova] Lock "667f56c0-896f-4e25-9e8a-3954f7733dc0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.194944] env[61962]: DEBUG oslo_concurrency.lockutils [req-9ced835d-469d-4083-a1f6-6a5ce5951d21 req-207b6b9f-b5f4-4ee9-b407-6bf10b913dbd service nova] Lock "667f56c0-896f-4e25-9e8a-3954f7733dc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.196374] env[61962]: DEBUG nova.compute.manager [req-9ced835d-469d-4083-a1f6-6a5ce5951d21 req-207b6b9f-b5f4-4ee9-b407-6bf10b913dbd service nova] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] No waiting events found dispatching network-vif-plugged-1c64dbd0-5d85-4d00-90b8-141f403f6842 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 582.196374] env[61962]: WARNING nova.compute.manager [req-9ced835d-469d-4083-a1f6-6a5ce5951d21 req-207b6b9f-b5f4-4ee9-b407-6bf10b913dbd service nova] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Received unexpected event network-vif-plugged-1c64dbd0-5d85-4d00-90b8-141f403f6842 for instance with vm_state building and task_state spawning. 
[ 582.229110] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.231684] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.426812] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b1726c72-0590-4b2f-925e-e9989081dc51 tempest-ServersAdminNegativeTestJSON-1331122422 tempest-ServersAdminNegativeTestJSON-1331122422-project-member] Acquiring lock "968692bd-12ae-46aa-b1de-5cd312f436ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.426812] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b1726c72-0590-4b2f-925e-e9989081dc51 tempest-ServersAdminNegativeTestJSON-1331122422 tempest-ServersAdminNegativeTestJSON-1331122422-project-member] Lock "968692bd-12ae-46aa-b1de-5cd312f436ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.656618] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] Acquiring lock "09098197-59d7-4e14-b76c-8366a1ea5fd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.657188] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] Lock "09098197-59d7-4e14-b76c-8366a1ea5fd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.688377] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] Acquiring lock "4fcdc4fd-b1f8-4188-a502-49cf675c9bdd" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.688668] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656
tempest-ListServersNegativeTestJSON-662338656-project-member] Lock "4fcdc4fd-b1f8-4188-a502-49cf675c9bdd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.719376] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] Acquiring lock "d6fc81f2-0fa6-4e9c-8d1b-60f0038112b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.719785] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] Lock "d6fc81f2-0fa6-4e9c-8d1b-60f0038112b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.885403] env[61962]: DEBUG oslo_concurrency.lockutils [None req-61235e9d-5a5f-4393-bfa4-a1ae88f788e2 tempest-ServersWithSpecificFlavorTestJSON-2100413347 tempest-ServersWithSpecificFlavorTestJSON-2100413347-project-member] Acquiring lock "511ce01a-0bea-49ba-b0e3-2a006ee88d32" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.885767] env[61962]: DEBUG oslo_concurrency.lockutils [None req-61235e9d-5a5f-4393-bfa4-a1ae88f788e2 tempest-ServersWithSpecificFlavorTestJSON-2100413347 tempest-ServersWithSpecificFlavorTestJSON-2100413347-project-member] Lock "511ce01a-0bea-49ba-b0e3-2a006ee88d32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.511082] env[61962]: DEBUG oslo_concurrency.lockutils [None req-99a9b1b0-e416-4a91-80e7-ba6a6163607b tempest-ImagesOneServerNegativeTestJSON-1618499339 tempest-ImagesOneServerNegativeTestJSON-1618499339-project-member] Acquiring lock "b241f341-8413-4f6f-b054-d9209640d652" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.511392] env[61962]: DEBUG oslo_concurrency.lockutils [None req-99a9b1b0-e416-4a91-80e7-ba6a6163607b tempest-ImagesOneServerNegativeTestJSON-1618499339 tempest-ImagesOneServerNegativeTestJSON-1618499339-project-member] Lock "b241f341-8413-4f6f-b054-d9209640d652" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.834476] env[61962]: DEBUG nova.compute.manager [req-e0f770a8-fc46-4c94-b5d4-140dce221ce2 req-f3dd9d7a-1eca-46c2-ba75-e258868b03d4 service nova] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Received event network-changed-1c64dbd0-5d85-4d00-90b8-141f403f6842 {{(pid=61962)
external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 585.834703] env[61962]: DEBUG nova.compute.manager [req-e0f770a8-fc46-4c94-b5d4-140dce221ce2 req-f3dd9d7a-1eca-46c2-ba75-e258868b03d4 service nova] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Refreshing instance network info cache due to event network-changed-1c64dbd0-5d85-4d00-90b8-141f403f6842. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 585.835094] env[61962]: DEBUG oslo_concurrency.lockutils [req-e0f770a8-fc46-4c94-b5d4-140dce221ce2 req-f3dd9d7a-1eca-46c2-ba75-e258868b03d4 service nova] Acquiring lock "refresh_cache-667f56c0-896f-4e25-9e8a-3954f7733dc0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.838907] env[61962]: DEBUG oslo_concurrency.lockutils [req-e0f770a8-fc46-4c94-b5d4-140dce221ce2 req-f3dd9d7a-1eca-46c2-ba75-e258868b03d4 service nova] Acquired lock "refresh_cache-667f56c0-896f-4e25-9e8a-3954f7733dc0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.838907] env[61962]: DEBUG nova.network.neutron [req-e0f770a8-fc46-4c94-b5d4-140dce221ce2 req-f3dd9d7a-1eca-46c2-ba75-e258868b03d4 service nova] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Refreshing network info cache for port 1c64dbd0-5d85-4d00-90b8-141f403f6842 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 586.708522] env[61962]: DEBUG nova.network.neutron [req-e0f770a8-fc46-4c94-b5d4-140dce221ce2 req-f3dd9d7a-1eca-46c2-ba75-e258868b03d4 service nova] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Updated VIF entry in instance network info cache for port 1c64dbd0-5d85-4d00-90b8-141f403f6842. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 586.708923] env[61962]: DEBUG nova.network.neutron [req-e0f770a8-fc46-4c94-b5d4-140dce221ce2 req-f3dd9d7a-1eca-46c2-ba75-e258868b03d4 service nova] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Updating instance_info_cache with network_info: [{"id": "1c64dbd0-5d85-4d00-90b8-141f403f6842", "address": "fa:16:3e:cb:73:32", "network": {"id": "3ec7d86e-a6c3-4b18-a68c-4e8357ab27c7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-62949195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c00b1a76e464ad2a9788673e256de22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ffea58a-e043-4ad1-9ad7-45a61fdca001", "external-id": "nsx-vlan-transportzone-662", "segmentation_id": 662, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c64dbd0-5d", "ovs_interfaceid": "1c64dbd0-5d85-4d00-90b8-141f403f6842", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.730704] env[61962]: DEBUG oslo_concurrency.lockutils [req-e0f770a8-fc46-4c94-b5d4-140dce221ce2 req-f3dd9d7a-1eca-46c2-ba75-e258868b03d4 service nova] 
Releasing lock "refresh_cache-667f56c0-896f-4e25-9e8a-3954f7733dc0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.953966] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9646297f-687a-455b-8b65-c7b7ac20c2d1 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "cfc08011-0291-450a-96eb-c4f8002e07e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.954278] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9646297f-687a-455b-8b65-c7b7ac20c2d1 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "cfc08011-0291-450a-96eb-c4f8002e07e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.054251] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5534e9bf-049c-4fb6-a71d-c79308fe60f6 tempest-ImagesNegativeTestJSON-192701559 tempest-ImagesNegativeTestJSON-192701559-project-member] Acquiring lock "a20846de-721a-4bb9-9826-e7c7ebf170c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.055062] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5534e9bf-049c-4fb6-a71d-c79308fe60f6 tempest-ImagesNegativeTestJSON-192701559 tempest-ImagesNegativeTestJSON-192701559-project-member] Lock "a20846de-721a-4bb9-9826-e7c7ebf170c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.157764] env[61962]: WARNING oslo_vmware.rw_handles [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 598.157764] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 598.157764] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 598.157764] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 598.157764] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 598.157764] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 598.157764] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 598.157764] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 598.157764] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 598.157764] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 598.157764] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 598.157764] env[61962]: ERROR 
oslo_vmware.rw_handles [ 598.158509] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/fd0d674d-87ea-46eb-bfc8-13a952dc5cdc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 598.160966] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 598.161478] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Copying Virtual Disk [datastore2] vmware_temp/fd0d674d-87ea-46eb-bfc8-13a952dc5cdc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/fd0d674d-87ea-46eb-bfc8-13a952dc5cdc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 598.161813] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5ba304c-9fa1-4fe8-93f0-489597404a76 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.175104] env[61962]: DEBUG oslo_vmware.api [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Waiting for the task: (returnval){ [ 598.175104] env[61962]: value = "task-4891938" [ 598.175104] env[61962]: _type = "Task" [ 598.175104] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.189398] env[61962]: DEBUG oslo_vmware.api [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Task: {'id': task-4891938, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.688960] env[61962]: DEBUG oslo_vmware.exceptions [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 598.691208] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.695139] env[61962]: ERROR nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 598.695139] env[61962]: Faults: ['InvalidArgument'] [ 598.695139] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Traceback (most recent call last): [ 598.695139] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 598.695139] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] yield resources [ 598.695139] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 598.695139] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] self.driver.spawn(context, instance, image_meta, [ 598.695139] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 598.695139] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 598.695139] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 598.695139] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] self._fetch_image_if_missing(context, vi) [ 598.695139] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] image_cache(vi, tmp_image_ds_loc) [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] vm_util.copy_virtual_disk( [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] session._wait_for_task(vmdk_copy_task) [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] return self.wait_for_task(task_ref) [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] return evt.wait() [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] result = hub.switch() [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 598.695565] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] return self.greenlet.switch() [ 598.695932] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 598.695932] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] self.f(*self.args, **self.kw) [ 598.695932] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 598.695932] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] raise exceptions.translate_fault(task_info.error) [ 598.695932] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 598.695932] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Faults: ['InvalidArgument'] [ 598.695932] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] [ 598.695932] env[61962]: INFO nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Terminating instance [ 598.697417] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.698387] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 598.698387] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f121210-dd0a-4f3d-a5a0-f54ef7c67ad3 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.704589] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquiring lock "refresh_cache-06028841-b941-4352-b5c1-fa71cc87a4a7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.704589] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquired lock "refresh_cache-06028841-b941-4352-b5c1-fa71cc87a4a7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.704589] env[61962]: DEBUG nova.network.neutron [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 598.716962] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 598.717227] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 598.718094] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0cb11bd-bd38-4419-bc99-79275dae4586 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.724982] env[61962]: DEBUG oslo_vmware.api [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Waiting for the task: (returnval){ [ 598.724982] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5242b802-322f-c9d4-2127-fce9c328dd01" [ 598.724982] env[61962]: _type = "Task" [ 598.724982] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.730290] env[61962]: DEBUG nova.network.neutron [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 598.736116] env[61962]: DEBUG oslo_vmware.api [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5242b802-322f-c9d4-2127-fce9c328dd01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.870270] env[61962]: DEBUG nova.network.neutron [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.883571] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Releasing lock "refresh_cache-06028841-b941-4352-b5c1-fa71cc87a4a7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.884090] env[61962]: DEBUG nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 598.884363] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 598.885578] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f935f07d-9789-41c6-b06f-774febde4b58 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.895171] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 598.897180] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad437841-be9d-4938-9e86-9ec1521c2e5f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.932305] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 598.932305] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 598.932305] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Deleting the datastore file [datastore2] 06028841-b941-4352-b5c1-fa71cc87a4a7 
{{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 598.932305] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cde79b99-db0e-4cf1-8288-be348ca360dd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.941067] env[61962]: DEBUG oslo_vmware.api [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Waiting for the task: (returnval){ [ 598.941067] env[61962]: value = "task-4891940" [ 598.941067] env[61962]: _type = "Task" [ 598.941067] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.948622] env[61962]: DEBUG oslo_vmware.api [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Task: {'id': task-4891940, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.241131] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 599.243517] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Creating directory with path [datastore2] vmware_temp/b099c35d-ae6a-49f2-9586-0c446aa97e67/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 599.244693] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8712984-a0d2-40d0-a009-7da562004eef {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.267444] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Created directory with path [datastore2] vmware_temp/b099c35d-ae6a-49f2-9586-0c446aa97e67/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 599.267444] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Fetch image to [datastore2] vmware_temp/b099c35d-ae6a-49f2-9586-0c446aa97e67/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 599.267444] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/b099c35d-ae6a-49f2-9586-0c446aa97e67/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 
{{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 599.267444] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed26ae1-47ae-42f2-a141-0b2d44f353d8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.274576] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ed28f9-f998-4bc2-a2a2-4cc265537bc6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.292736] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef65e57-3dbd-4af7-a106-b86ad569c0d4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.341426] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc79a7f-1d91-4b3d-802b-7bb249f50a9a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.345612] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquiring lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.345847] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.350704] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7dc2086e-9ed2-4640-acf7-a049acbf8eac {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.375155] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 599.441268] env[61962]: DEBUG oslo_vmware.rw_handles [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b099c35d-ae6a-49f2-9586-0c446aa97e67/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2.
{{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 599.507422] env[61962]: DEBUG oslo_vmware.api [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Task: {'id': task-4891940, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036488} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.509406] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 599.509592] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 599.509874] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 599.510324] env[61962]: INFO nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Took 0.63 seconds to destroy the instance on the hypervisor. [ 599.510575] env[61962]: DEBUG oslo.service.loopingcall [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.511215] env[61962]: DEBUG oslo_vmware.rw_handles [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 599.514025] env[61962]: DEBUG oslo_vmware.rw_handles [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b099c35d-ae6a-49f2-9586-0c446aa97e67/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 599.514025] env[61962]: DEBUG nova.compute.manager [-] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Skipping network deallocation for instance since networking was not requested.
{{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 599.514482] env[61962]: DEBUG nova.compute.claims [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 599.515374] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.515374] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.917963] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392d6ca3-c3af-49fe-8bad-fc5c2557fb64 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.931918] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e673a2e9-9546-4551-9d43-48c36b1e6a50 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.975568] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fad04f-9704-48de-95d9-06992c843a6b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.985524] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16724d6b-e428-4ee7-ab3e-db97fefc2fe9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.000645] env[61962]: DEBUG nova.compute.provider_tree [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.015199] env[61962]: DEBUG nova.scheduler.client.report [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.036713] env[61962]: DEBUG oslo_concurrency.lockutils 
[None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.522s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.037297] env[61962]: ERROR nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 600.037297] env[61962]: Faults: ['InvalidArgument'] [ 600.037297] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Traceback (most recent call last): [ 600.037297] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 600.037297] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] self.driver.spawn(context, instance, image_meta, [ 600.037297] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 600.037297] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.037297] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 600.037297] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] self._fetch_image_if_missing(context, vi) [ 600.037297] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 600.037297] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] image_cache(vi, tmp_image_ds_loc) [ 600.037297] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] vm_util.copy_virtual_disk( [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] session._wait_for_task(vmdk_copy_task) [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] return self.wait_for_task(task_ref) [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] 
return evt.wait() [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] result = hub.switch() [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] return self.greenlet.switch() [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 600.038051] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] self.f(*self.args, **self.kw) [ 600.038751] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 600.038751] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] raise exceptions.translate_fault(task_info.error) [ 600.038751] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 600.038751] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Faults: ['InvalidArgument'] [ 600.038751] env[61962]: ERROR nova.compute.manager [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] [ 600.038751] env[61962]: DEBUG nova.compute.utils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 600.040779] env[61962]: DEBUG nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Build of instance 06028841-b941-4352-b5c1-fa71cc87a4a7 was re-scheduled: A specified parameter was not correct: fileType [ 600.040779] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 600.041739] env[61962]: DEBUG nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 600.041739] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquiring lock "refresh_cache-06028841-b941-4352-b5c1-fa71cc87a4a7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.041739] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 
tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Acquired lock "refresh_cache-06028841-b941-4352-b5c1-fa71cc87a4a7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.041739] env[61962]: DEBUG nova.network.neutron [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 600.075995] env[61962]: DEBUG nova.network.neutron [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 600.220883] env[61962]: DEBUG nova.network.neutron [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.244880] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Releasing lock "refresh_cache-06028841-b941-4352-b5c1-fa71cc87a4a7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.245390] env[61962]: DEBUG nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 600.245390] env[61962]: DEBUG nova.compute.manager [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] [instance: 06028841-b941-4352-b5c1-fa71cc87a4a7] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 600.404425] env[61962]: INFO nova.scheduler.client.report [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Deleted allocations for instance 06028841-b941-4352-b5c1-fa71cc87a4a7 [ 600.447764] env[61962]: DEBUG oslo_concurrency.lockutils [None req-98f60c33-8db1-481d-81f0-6e45672909f4 tempest-ServerDiagnosticsV248Test-33376295 tempest-ServerDiagnosticsV248Test-33376295-project-member] Lock "06028841-b941-4352-b5c1-fa71cc87a4a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.031s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.471114] env[61962]: DEBUG nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 600.534723] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.535069] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.536761] env[61962]: INFO nova.compute.claims [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.987344] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d0a64e-3940-4f44-9d91-d8bcf967527d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.995755] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e8559b-ce19-4818-b4a6-3c457f8354b5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.031966] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b152a9d-5bc9-4cef-9215-23b2de301e20 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.040457] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cabfe54-77b0-4813-9b46-8084fe3d60c8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.055804] env[61962]: DEBUG 
nova.compute.provider_tree [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.067406] env[61962]: DEBUG nova.scheduler.client.report [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 601.091607] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.556s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.095358] env[61962]: DEBUG nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 601.145530] env[61962]: DEBUG nova.compute.utils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.146827] env[61962]: DEBUG nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 601.146989] env[61962]: DEBUG nova.network.neutron [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 601.178651] env[61962]: DEBUG nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Start building block device mappings for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 601.303436] env[61962]: DEBUG nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 601.323213] env[61962]: DEBUG nova.policy [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a5fdc2d236b4653b7b258f0859aa6af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '78e44aa9cd5642928627210447b3e177', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 601.346925] env[61962]: DEBUG nova.virt.hardware [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 601.347558] env[61962]: DEBUG nova.virt.hardware [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 601.347558] env[61962]: DEBUG nova.virt.hardware [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 601.347558] env[61962]: DEBUG nova.virt.hardware [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 601.347755] env[61962]: DEBUG nova.virt.hardware [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 601.347818] env[61962]: DEBUG nova.virt.hardware [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 601.351363] env[61962]: DEBUG nova.virt.hardware [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 601.351572] env[61962]: DEBUG nova.virt.hardware [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 601.353505] env[61962]: DEBUG nova.virt.hardware [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 601.353701] env[61962]: DEBUG nova.virt.hardware [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 601.354135] env[61962]: DEBUG nova.virt.hardware [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 601.354867] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974c7266-5ac0-4aca-a44e-70bb825f23b9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.366846] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd68e88-43e3-4a5e-90b3-a6fad018a5c4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.741242] env[61962]: DEBUG nova.network.neutron [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Successfully created port: 5ddebb2f-8010-4827-a58c-7e5d659d38b7 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 605.210523] env[61962]: DEBUG nova.network.neutron [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Successfully updated port: 5ddebb2f-8010-4827-a58c-7e5d659d38b7 {{(pid=61962) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 605.252931] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquiring lock "refresh_cache-295064f0-9677-4287-877a-ef2a33a87fef" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.252931] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquired lock "refresh_cache-295064f0-9677-4287-877a-ef2a33a87fef" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.252931] env[61962]: DEBUG nova.network.neutron [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 605.379018] env[61962]: DEBUG nova.network.neutron [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 606.210700] env[61962]: DEBUG nova.network.neutron [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Updating instance_info_cache with network_info: [{"id": "5ddebb2f-8010-4827-a58c-7e5d659d38b7", "address": "fa:16:3e:5c:3a:3a", "network": {"id": "d39a5a72-777d-4972-9688-b7ffa8e0a953", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1997599418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78e44aa9cd5642928627210447b3e177", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ddebb2f-80", "ovs_interfaceid": "5ddebb2f-8010-4827-a58c-7e5d659d38b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.236986] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Releasing lock "refresh_cache-295064f0-9677-4287-877a-ef2a33a87fef" {{(pid=61962) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.237333] env[61962]: DEBUG nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Instance network_info: |[{"id": "5ddebb2f-8010-4827-a58c-7e5d659d38b7", "address": "fa:16:3e:5c:3a:3a", "network": {"id": "d39a5a72-777d-4972-9688-b7ffa8e0a953", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1997599418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78e44aa9cd5642928627210447b3e177", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ddebb2f-80", "ovs_interfaceid": "5ddebb2f-8010-4827-a58c-7e5d659d38b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 606.237767] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:3a:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '838c9497-35dd-415e-96c7-8dc21b0cd4b3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ddebb2f-8010-4827-a58c-7e5d659d38b7', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 606.250083] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Creating folder: Project (78e44aa9cd5642928627210447b3e177). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 606.251040] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-246b4651-38ac-434e-89a4-1f4bf913bada {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.269229] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Created folder: Project (78e44aa9cd5642928627210447b3e177) in parent group-v953327. [ 606.269229] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Creating folder: Instances. Parent ref: group-v953362. 
{{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 606.269229] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a13aa32c-dac0-47f7-9096-30e6edf80bcd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.283440] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Created folder: Instances in parent group-v953362. [ 606.283709] env[61962]: DEBUG oslo.service.loopingcall [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 606.284466] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 606.285682] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d52f187c-0c81-4961-96a6-f1eb45ef55a8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.308239] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 606.308239] env[61962]: value = "task-4891945" [ 606.308239] env[61962]: _type = "Task" [ 606.308239] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.318337] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891945, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.649297] env[61962]: DEBUG nova.compute.manager [req-047844c5-932c-440f-9300-49a74dca2a65 req-f105ae3c-97cb-41e5-a600-0773aa64b856 service nova] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Received event network-vif-plugged-5ddebb2f-8010-4827-a58c-7e5d659d38b7 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 606.649567] env[61962]: DEBUG oslo_concurrency.lockutils [req-047844c5-932c-440f-9300-49a74dca2a65 req-f105ae3c-97cb-41e5-a600-0773aa64b856 service nova] Acquiring lock "295064f0-9677-4287-877a-ef2a33a87fef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.649836] env[61962]: DEBUG oslo_concurrency.lockutils [req-047844c5-932c-440f-9300-49a74dca2a65 req-f105ae3c-97cb-41e5-a600-0773aa64b856 service nova] Lock "295064f0-9677-4287-877a-ef2a33a87fef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.651279] env[61962]: DEBUG oslo_concurrency.lockutils [req-047844c5-932c-440f-9300-49a74dca2a65 req-f105ae3c-97cb-41e5-a600-0773aa64b856 service nova] Lock "295064f0-9677-4287-877a-ef2a33a87fef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.651279] env[61962]: DEBUG nova.compute.manager [req-047844c5-932c-440f-9300-49a74dca2a65 req-f105ae3c-97cb-41e5-a600-0773aa64b856 service nova] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] No waiting events found dispatching network-vif-plugged-5ddebb2f-8010-4827-a58c-7e5d659d38b7 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 606.651513] env[61962]: WARNING nova.compute.manager [req-047844c5-932c-440f-9300-49a74dca2a65 req-f105ae3c-97cb-41e5-a600-0773aa64b856 service nova] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Received unexpected event network-vif-plugged-5ddebb2f-8010-4827-a58c-7e5d659d38b7 for instance with vm_state building and task_state spawning. [ 606.818819] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891945, 'name': CreateVM_Task, 'duration_secs': 0.376468} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.818988] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 606.819749] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.819903] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.820245] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 606.821929] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-679e5ecd-2dbe-43d6-b868-c07dfcb16857 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.826080] env[61962]: DEBUG oslo_vmware.api [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Waiting for the task: (returnval){ [ 606.826080] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]521a8582-b1b8-40f3-8d33-8804821e9ff0" [ 606.826080] env[61962]: _type = "Task" [ 606.826080] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.837753] env[61962]: DEBUG oslo_vmware.api [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]521a8582-b1b8-40f3-8d33-8804821e9ff0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.349577] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.350534] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 607.352049] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.450238] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e175c8ae-7160-4ad7-b0bc-3fb054dd71be tempest-ServerActionsV293TestJSON-1621295222 tempest-ServerActionsV293TestJSON-1621295222-project-member] Acquiring lock "2dad2cb0-56e2-4fb6-b540-0fd98265697f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.450368] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e175c8ae-7160-4ad7-b0bc-3fb054dd71be tempest-ServerActionsV293TestJSON-1621295222 tempest-ServerActionsV293TestJSON-1621295222-project-member] Lock "2dad2cb0-56e2-4fb6-b540-0fd98265697f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.767396] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2efed426-f73a-4ebe-8065-0c95039aec6f tempest-ServersListShow296Test-974296751 tempest-ServersListShow296Test-974296751-project-member] Acquiring lock "8436449e-8f8d-4fd0-96be-31f1464f03ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.767722] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2efed426-f73a-4ebe-8065-0c95039aec6f tempest-ServersListShow296Test-974296751 tempest-ServersListShow296Test-974296751-project-member] Lock "8436449e-8f8d-4fd0-96be-31f1464f03ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.928853] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0fa19bb3-4f67-4f1b-9bd9-2589e1a304ec tempest-InstanceActionsV221TestJSON-1432779233 tempest-InstanceActionsV221TestJSON-1432779233-project-member] Acquiring lock "92f66377-ddec-4312-b10c-f7edc4855d48" 
by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.929139] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0fa19bb3-4f67-4f1b-9bd9-2589e1a304ec tempest-InstanceActionsV221TestJSON-1432779233 tempest-InstanceActionsV221TestJSON-1432779233-project-member] Lock "92f66377-ddec-4312-b10c-f7edc4855d48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.388447] env[61962]: DEBUG nova.compute.manager [req-b2ac2d7e-5464-4c9a-8f1e-410b69cf3a65 req-d2dbf3fd-6244-48bf-ab24-1115c7370737 service nova] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Received event network-changed-5ddebb2f-8010-4827-a58c-7e5d659d38b7 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 610.388447] env[61962]: DEBUG nova.compute.manager [req-b2ac2d7e-5464-4c9a-8f1e-410b69cf3a65 req-d2dbf3fd-6244-48bf-ab24-1115c7370737 service nova] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Refreshing instance network info cache due to event network-changed-5ddebb2f-8010-4827-a58c-7e5d659d38b7. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 610.388696] env[61962]: DEBUG oslo_concurrency.lockutils [req-b2ac2d7e-5464-4c9a-8f1e-410b69cf3a65 req-d2dbf3fd-6244-48bf-ab24-1115c7370737 service nova] Acquiring lock "refresh_cache-295064f0-9677-4287-877a-ef2a33a87fef" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.388696] env[61962]: DEBUG oslo_concurrency.lockutils [req-b2ac2d7e-5464-4c9a-8f1e-410b69cf3a65 req-d2dbf3fd-6244-48bf-ab24-1115c7370737 service nova] Acquired lock "refresh_cache-295064f0-9677-4287-877a-ef2a33a87fef" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.389430] env[61962]: DEBUG nova.network.neutron [req-b2ac2d7e-5464-4c9a-8f1e-410b69cf3a65 req-d2dbf3fd-6244-48bf-ab24-1115c7370737 service nova] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Refreshing network info cache for port 5ddebb2f-8010-4827-a58c-7e5d659d38b7 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 611.017439] env[61962]: DEBUG nova.network.neutron [req-b2ac2d7e-5464-4c9a-8f1e-410b69cf3a65 req-d2dbf3fd-6244-48bf-ab24-1115c7370737 service nova] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Updated VIF entry in instance network info cache for port 5ddebb2f-8010-4827-a58c-7e5d659d38b7. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 611.017791] env[61962]: DEBUG nova.network.neutron [req-b2ac2d7e-5464-4c9a-8f1e-410b69cf3a65 req-d2dbf3fd-6244-48bf-ab24-1115c7370737 service nova] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Updating instance_info_cache with network_info: [{"id": "5ddebb2f-8010-4827-a58c-7e5d659d38b7", "address": "fa:16:3e:5c:3a:3a", "network": {"id": "d39a5a72-777d-4972-9688-b7ffa8e0a953", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1997599418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78e44aa9cd5642928627210447b3e177", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ddebb2f-80", "ovs_interfaceid": "5ddebb2f-8010-4827-a58c-7e5d659d38b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.030151] env[61962]: DEBUG oslo_concurrency.lockutils [req-b2ac2d7e-5464-4c9a-8f1e-410b69cf3a65 req-d2dbf3fd-6244-48bf-ab24-1115c7370737 service nova] Releasing lock "refresh_cache-295064f0-9677-4287-877a-ef2a33a87fef" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.160404] env[61962]: DEBUG oslo_concurrency.lockutils [None req-780aa977-c838-472d-b3f8-e84f37c4403f tempest-ServerActionsTestOtherA-1411072425 tempest-ServerActionsTestOtherA-1411072425-project-member] Acquiring lock "17bc2689-2250-48bc-bc0e-47821b20d3ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.161140] env[61962]: DEBUG oslo_concurrency.lockutils [None req-780aa977-c838-472d-b3f8-e84f37c4403f tempest-ServerActionsTestOtherA-1411072425 tempest-ServerActionsTestOtherA-1411072425-project-member] Lock "17bc2689-2250-48bc-bc0e-47821b20d3ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.517379] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b422153a-e67e-476e-bf23-70d15cc3aff1 tempest-ServersTestMultiNic-318652074 tempest-ServersTestMultiNic-318652074-project-member] Acquiring lock "146b08c3-3893-4c0e-9448-05a988c19ae8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.517663] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b422153a-e67e-476e-bf23-70d15cc3aff1 tempest-ServersTestMultiNic-318652074 tempest-ServersTestMultiNic-318652074-project-member] 
Lock "146b08c3-3893-4c0e-9448-05a988c19ae8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.728434] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.728727] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.766554] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.766554] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.766554] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 619.766554] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.784650] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.784875] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.785054] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.785239] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 619.786423] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d051a1-454a-4c53-89be-61707953d4b1 {{(pid=61962) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.803520] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7350bb5-d7ba-4bf8-9e04-aac6137e335d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.825894] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286911fe-90e9-4f6c-8094-c9c7e87b0bb3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.836139] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904fc0a1-bd80-41af-80a6-f670b8395f22 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.869867] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180554MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 619.869867] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.869867] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.984980] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 495acb0f-5b45-457e-9218-db57a3612289 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 619.987023] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance dc6c9cb6-419f-4122-8351-0c517fb8616d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 619.987023] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53921884-f617-4020-b779-650becf89a58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 619.987023] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6ba9719c-3689-4db6-8ab2-441dc52c7bfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 619.987023] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 619.987272] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d0df932e-822b-4c60-a588-445aa2f14cdb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 619.987272] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a6e6f30d-c667-4f70-bf60-596a8fe97188 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 619.987272] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b9782756-27e1-46fc-940f-069cfb9ab9f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 619.987272] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 667f56c0-896f-4e25-9e8a-3954f7733dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 619.987425] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 295064f0-9677-4287-877a-ef2a33a87fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 620.018537] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6996289c-f881-4733-8943-98e3633e43ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.051242] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.063552] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.080508] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 968692bd-12ae-46aa-b1de-5cd312f436ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.098301] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 09098197-59d7-4e14-b76c-8366a1ea5fd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.112434] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4fcdc4fd-b1f8-4188-a502-49cf675c9bdd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.130468] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d6fc81f2-0fa6-4e9c-8d1b-60f0038112b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.145342] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 511ce01a-0bea-49ba-b0e3-2a006ee88d32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.158489] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b241f341-8413-4f6f-b054-d9209640d652 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.172492] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance cfc08011-0291-450a-96eb-c4f8002e07e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.185614] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a20846de-721a-4bb9-9826-e7c7ebf170c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.198372] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.209359] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2dad2cb0-56e2-4fb6-b540-0fd98265697f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.223711] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8436449e-8f8d-4fd0-96be-31f1464f03ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.238940] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 92f66377-ddec-4312-b10c-f7edc4855d48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.258320] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 17bc2689-2250-48bc-bc0e-47821b20d3ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.286443] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 146b08c3-3893-4c0e-9448-05a988c19ae8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 620.286714] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 620.286993] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '1', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_5387c68fd60c4139a463bca1d3255da0': '1', 'io_workload': '10', 'num_proj_c7a5eef5ba8d4ba88356ffba87aec75c': '1', 'num_proj_f1aff07cb3e74298b4e347bd4176fbf8': '1', 'num_proj_1dd984fe1bf84471ae103d28565d160b': '1', 'num_proj_13cf9ead87c749fc963fbe35b22b4fd6': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_6e7e747becdf4976b570b79d4b20d88b': '1', 'num_proj_a7b748ae1e6d4e17bc463811008c5596': '1', 'num_proj_6c00b1a76e464ad2a9788673e256de22': '1', 'num_proj_78e44aa9cd5642928627210447b3e177': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 620.808138] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684f2bf8-1cb7-4522-9978-968984cf447f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.816965] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eadf697b-eb05-4f43-a0b9-972491398275 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.850854] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1252e8da-f93c-4368-9606-4d672e58790b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.859292] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd55b0bd-d6ac-4a66-b56e-87c7e511ae7a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.877798] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.890873] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 620.911427] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 620.912504] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.043s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.305969] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.306093] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 621.306218] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 621.341318] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 621.342024] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53921884-f617-4020-b779-650becf89a58] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 621.342024] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 621.342024] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 621.342024] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 621.342024] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 621.342285] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 621.342285] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 621.342507] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 621.342507] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 621.342584] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 621.343098] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.343283] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.343434] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.343729] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.209518] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0c7770ba-e86c-4883-8d4f-ff07f7134239 tempest-AttachVolumeTestJSON-1002857752 tempest-AttachVolumeTestJSON-1002857752-project-member] Acquiring lock "2ef4b06a-c5c6-46ed-89d8-30143aa30a25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.209518] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0c7770ba-e86c-4883-8d4f-ff07f7134239 tempest-AttachVolumeTestJSON-1002857752 tempest-AttachVolumeTestJSON-1002857752-project-member] Lock "2ef4b06a-c5c6-46ed-89d8-30143aa30a25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.027860] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a06e4745-56c4-49f7-b260-41d5e755269d tempest-VolumesAdminNegativeTest-1349254896 tempest-VolumesAdminNegativeTest-1349254896-project-member] Acquiring lock "2142b6e8-d060-48eb-8ff1-3e92e6188a83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.028280] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a06e4745-56c4-49f7-b260-41d5e755269d tempest-VolumesAdminNegativeTest-1349254896 tempest-VolumesAdminNegativeTest-1349254896-project-member] Lock "2142b6e8-d060-48eb-8ff1-3e92e6188a83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.768971] env[61962]: DEBUG oslo_concurrency.lockutils [None req-66ccc47b-35fa-4b88-a452-e1d0ad54e239 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Acquiring lock "dbe5937a-cf98-4abc-ad5a-521762e0be9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.769370] env[61962]: DEBUG oslo_concurrency.lockutils [None req-66ccc47b-35fa-4b88-a452-e1d0ad54e239 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Lock "dbe5937a-cf98-4abc-ad5a-521762e0be9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.259527] env[61962]: DEBUG oslo_concurrency.lockutils [None req-bb9b0d23-3bd4-41a5-986c-f09e040cfac8 tempest-AttachInterfacesV270Test-634782997 tempest-AttachInterfacesV270Test-634782997-project-member] Acquiring lock "b387b903-2f5c-40ad-aeea-b013fb66e17d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.259815] env[61962]: DEBUG oslo_concurrency.lockutils [None req-bb9b0d23-3bd4-41a5-986c-f09e040cfac8 tempest-AttachInterfacesV270Test-634782997 tempest-AttachInterfacesV270Test-634782997-project-member] Lock "b387b903-2f5c-40ad-aeea-b013fb66e17d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.386229] env[61962]: WARNING oslo_vmware.rw_handles [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 646.386229] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 646.386229] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 646.386229] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 646.386229] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 646.386229] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 646.386229] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 646.386229] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 646.386229] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 646.386229] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 646.386229] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 646.386229] env[61962]: ERROR oslo_vmware.rw_handles [ 646.386807] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/b099c35d-ae6a-49f2-9586-0c446aa97e67/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 
646.387998] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 646.388255] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Copying Virtual Disk [datastore2] vmware_temp/b099c35d-ae6a-49f2-9586-0c446aa97e67/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/b099c35d-ae6a-49f2-9586-0c446aa97e67/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 646.388534] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4312d61d-a916-43af-bc72-3ade24b29eee {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.397695] env[61962]: DEBUG oslo_vmware.api [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Waiting for the task: (returnval){ [ 646.397695] env[61962]: value = "task-4891946" [ 646.397695] env[61962]: _type = "Task" [ 646.397695] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.406891] env[61962]: DEBUG oslo_vmware.api [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Task: {'id': task-4891946, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.909970] env[61962]: DEBUG oslo_vmware.exceptions [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 646.910217] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.910760] env[61962]: ERROR nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 646.910760] env[61962]: Faults: ['InvalidArgument'] [ 646.910760] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] Traceback (most recent call last): [ 646.910760] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 646.910760] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] yield resources [ 646.910760] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 646.910760] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] self.driver.spawn(context, instance, image_meta, [ 646.910760] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 646.910760] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] self._vmops.spawn(context, instance, image_meta, injected_files, [ 646.910760] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 646.910760] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] self._fetch_image_if_missing(context, vi) [ 646.910760] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] image_cache(vi, tmp_image_ds_loc) [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] vm_util.copy_virtual_disk( [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] session._wait_for_task(vmdk_copy_task) [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] return self.wait_for_task(task_ref) [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] return evt.wait() [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] result = hub.switch() [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 646.911078] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] return self.greenlet.switch() [ 646.911388] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 646.911388] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] self.f(*self.args, **self.kw) [ 646.911388] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 646.911388] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] raise exceptions.translate_fault(task_info.error) [ 646.911388] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 646.911388] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] Faults: ['InvalidArgument'] [ 646.911388] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] [ 646.911388] env[61962]: INFO nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Terminating instance [ 646.912592] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.912788] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 646.913401] env[61962]: DEBUG nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 
495acb0f-5b45-457e-9218-db57a3612289] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 646.913580] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 646.913797] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ea919f6-9b12-49ae-82f2-03744cd80ef9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.916137] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446f366b-390d-4066-ab57-021203910feb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.923314] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 646.923551] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b9ab656-8c4b-4b04-beae-6fd103464131 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.926494] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 646.926567] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 646.927218] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02bab97f-6f4e-4e20-9494-b07f0686ae39 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.932244] env[61962]: DEBUG oslo_vmware.api [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Waiting for the task: (returnval){ [ 646.932244] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]520e13a1-3881-a36b-a137-dc204c40d9f6" [ 646.932244] env[61962]: _type = "Task" [ 646.932244] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.939911] env[61962]: DEBUG oslo_vmware.api [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]520e13a1-3881-a36b-a137-dc204c40d9f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.989543] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 646.989686] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 646.991193] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Deleting the datastore file [datastore2] 495acb0f-5b45-457e-9218-db57a3612289 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 646.991193] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd5c041c-2437-4ba8-a338-a910dfa18b4f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.996584] env[61962]: DEBUG oslo_vmware.api [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Waiting for the task: (returnval){ [ 646.996584] env[61962]: value = "task-4891948" [ 646.996584] env[61962]: _type = "Task" [ 646.996584] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.004943] env[61962]: DEBUG oslo_vmware.api [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Task: {'id': task-4891948, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.443273] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 647.443645] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Creating directory with path [datastore2] vmware_temp/1a7f168f-47fe-419c-9b06-3e30ff328019/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 647.443848] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14007b70-d0ef-4bb3-a714-3ecc0ecee895 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.456960] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Created directory with path [datastore2] vmware_temp/1a7f168f-47fe-419c-9b06-3e30ff328019/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 647.457247] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Fetch image to [datastore2] vmware_temp/1a7f168f-47fe-419c-9b06-3e30ff328019/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 647.457440] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/1a7f168f-47fe-419c-9b06-3e30ff328019/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 647.458261] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4504c49-837f-4e95-b940-05236e91b0e8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.465725] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07629c3-2998-47ae-a976-ad49e2e75718 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.475738] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11cdf750-0be9-4696-a04c-1c8efb9ca571 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.509822] env[61962]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bea9e99-f619-4748-8af3-b93c47bfe4a4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.518744] env[61962]: DEBUG oslo_vmware.api [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Task: {'id': task-4891948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087975} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.518945] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0076d187-949c-4ca3-a7d9-82aa8984aa67 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.520620] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 647.520810] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 647.520981] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 647.521171] env[61962]: INFO nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Took 0.61 seconds to destroy the instance on the hypervisor. 
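The records above trace the driver's task-polling pattern end to end: a datastore operation such as FileManager.DeleteDatastoreFile_Task is invoked, oslo_vmware.api waits on the returned task, _poll_task logs "progress is 0%" until vCenter reports completion with a duration_secs result, and a failed task surfaces as a translated fault (as the earlier CopyVirtualDisk_Task did with InvalidArgument). A minimal stand-alone sketch of that poll loop follows; TaskInfo, fetch_task_info and TaskFaultError are illustrative stand-ins rather than oslo.vmware's real classes, and the actual library drives the loop with an eventlet-based looping call instead of time.sleep.

    import time
    from dataclasses import dataclass
    from typing import Callable, Optional

    @dataclass
    class TaskInfo:
        state: str                  # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: Optional[str] = None

    class TaskFaultError(Exception):
        """Stand-in for a fault translated from the remote task's error."""

    def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                      poll_interval: float = 0.5,
                      timeout: float = 300.0) -> TaskInfo:
        """Poll a remote task until it reaches a terminal state.

        Mirrors the behaviour visible in the log: periodic polls that
        report progress, success returning the final task info, and an
        error state raised as an exception (cf. "raise
        exceptions.translate_fault(task_info.error)" in the traceback).
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise TaskFaultError(info.error or 'unknown fault')
            print(f"progress is {info.progress}%")  # log-style progress line
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within the timeout')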
[ 647.523642] env[61962]: DEBUG nova.compute.claims [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 647.523810] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.524032] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.547063] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 647.605263] env[61962]: DEBUG oslo_vmware.rw_handles [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1a7f168f-47fe-419c-9b06-3e30ff328019/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 647.664104] env[61962]: DEBUG oslo_vmware.rw_handles [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 647.664302] env[61962]: DEBUG oslo_vmware.rw_handles [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1a7f168f-47fe-419c-9b06-3e30ff328019/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 648.031033] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d494293-533e-4d75-aedf-5fd7c5c68be5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.037810] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28801d25-6c24-4ee8-9c67-a04939a56609 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.067956] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2670a87-122c-47eb-8e39-5593cc6b3c42 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.076502] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879890ec-25f1-4370-bf0e-32a62b4f01fe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.090182] env[61962]: DEBUG nova.compute.provider_tree [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.117576] env[61962]: DEBUG nova.scheduler.client.report [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 648.131787] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.608s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.132337] env[61962]: ERROR nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 648.132337] env[61962]: Faults: ['InvalidArgument'] [ 648.132337] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] Traceback (most recent call last): [ 648.132337] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 648.132337] env[61962]: ERROR nova.compute.manager 
[instance: 495acb0f-5b45-457e-9218-db57a3612289] self.driver.spawn(context, instance, image_meta, [ 648.132337] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 648.132337] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] self._vmops.spawn(context, instance, image_meta, injected_files, [ 648.132337] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 648.132337] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] self._fetch_image_if_missing(context, vi) [ 648.132337] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 648.132337] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] image_cache(vi, tmp_image_ds_loc) [ 648.132337] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] vm_util.copy_virtual_disk( [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] session._wait_for_task(vmdk_copy_task) [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] return self.wait_for_task(task_ref) [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] return evt.wait() [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] result = hub.switch() [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] return self.greenlet.switch() [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 648.132733] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] self.f(*self.args, **self.kw) [ 648.133229] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 648.133229] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] raise exceptions.translate_fault(task_info.error) [ 648.133229] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 648.133229] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] Faults: ['InvalidArgument'] [ 648.133229] env[61962]: ERROR nova.compute.manager [instance: 495acb0f-5b45-457e-9218-db57a3612289] [ 648.133229] env[61962]: DEBUG nova.compute.utils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 648.136286] env[61962]: DEBUG nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Build of instance 495acb0f-5b45-457e-9218-db57a3612289 was re-scheduled: A specified parameter was not correct: fileType [ 648.136286] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 648.136676] env[61962]: DEBUG nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 648.136853] env[61962]: DEBUG nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 648.137016] env[61962]: DEBUG nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 648.137211] env[61962]: DEBUG nova.network.neutron [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 648.852544] env[61962]: DEBUG nova.network.neutron [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.866895] env[61962]: INFO nova.compute.manager [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] [instance: 495acb0f-5b45-457e-9218-db57a3612289] Took 0.73 seconds to deallocate network for instance. [ 648.997111] env[61962]: INFO nova.scheduler.client.report [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Deleted allocations for instance 495acb0f-5b45-457e-9218-db57a3612289 [ 649.016827] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fbaae0eb-893e-49a1-93b8-ba5ae997b76b tempest-ServerDiagnosticsTest-151015285 tempest-ServerDiagnosticsTest-151015285-project-member] Lock "495acb0f-5b45-457e-9218-db57a3612289" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.596s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.055396] env[61962]: DEBUG nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 649.129989] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.130263] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.132028] env[61962]: INFO nova.compute.claims [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.629026] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06128bdd-dd09-404b-b277-c388115155ea {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.636564] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b69b40-e498-478f-a132-73c4323c6eff {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.668702] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3005567-4e6b-480e-a8b3-6ab144b60032 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.677178] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4395b5-b667-4e01-9de7-24fd73ba1a6a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.692165] env[61962]: DEBUG nova.compute.provider_tree [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.701334] env[61962]: DEBUG nova.scheduler.client.report [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 649.718780] 
env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.588s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.719302] env[61962]: DEBUG nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 649.758879] env[61962]: DEBUG nova.compute.utils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 649.760671] env[61962]: DEBUG nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 649.764037] env[61962]: DEBUG nova.network.neutron [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 649.774729] env[61962]: DEBUG nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 649.838149] env[61962]: DEBUG nova.policy [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be2c28c8eb8147d8a06d62cad148f800', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b4cf0a2155644882b4bf42e69e210138', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 649.861714] env[61962]: DEBUG nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 649.904764] env[61962]: DEBUG nova.virt.hardware [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 649.905038] env[61962]: DEBUG nova.virt.hardware [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 649.905203] env[61962]: DEBUG nova.virt.hardware [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.905387] env[61962]: DEBUG nova.virt.hardware [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 649.905566] env[61962]: DEBUG nova.virt.hardware [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.905738] env[61962]: DEBUG nova.virt.hardware [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 649.906254] env[61962]: DEBUG nova.virt.hardware [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 649.906370] env[61962]: DEBUG nova.virt.hardware [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 649.906653] env[61962]: DEBUG nova.virt.hardware [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 649.906893] env[61962]: DEBUG nova.virt.hardware [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 649.907106] env[61962]: DEBUG nova.virt.hardware [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 649.908023] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88720d3-d4c4-4a1b-b236-ada75c269f1d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.917054] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7abecb-5674-4aef-af46-641300bb80cd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.429954] env[61962]: DEBUG nova.network.neutron [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Successfully created port: a44f0318-04ca-4f39-ba6e-11403747065e {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.576440] env[61962]: DEBUG nova.compute.manager [req-9fb298be-1a86-4e92-9797-40f36ecfeb33 req-c794bb9a-1caa-4f2d-9e6c-322a613840e5 service nova] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Received event network-vif-plugged-a44f0318-04ca-4f39-ba6e-11403747065e {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 651.576440] env[61962]: DEBUG oslo_concurrency.lockutils [req-9fb298be-1a86-4e92-9797-40f36ecfeb33 req-c794bb9a-1caa-4f2d-9e6c-322a613840e5 service nova] Acquiring lock "6996289c-f881-4733-8943-98e3633e43ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.576440] env[61962]: DEBUG oslo_concurrency.lockutils [req-9fb298be-1a86-4e92-9797-40f36ecfeb33 req-c794bb9a-1caa-4f2d-9e6c-322a613840e5 service nova] Lock "6996289c-f881-4733-8943-98e3633e43ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.576440] env[61962]: DEBUG oslo_concurrency.lockutils [req-9fb298be-1a86-4e92-9797-40f36ecfeb33 req-c794bb9a-1caa-4f2d-9e6c-322a613840e5 service nova] Lock "6996289c-f881-4733-8943-98e3633e43ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.577371] env[61962]: DEBUG nova.compute.manager [req-9fb298be-1a86-4e92-9797-40f36ecfeb33 req-c794bb9a-1caa-4f2d-9e6c-322a613840e5 service nova] [instance: 6996289c-f881-4733-8943-98e3633e43ca] No waiting events found dispatching network-vif-plugged-a44f0318-04ca-4f39-ba6e-11403747065e {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 651.577371] env[61962]: WARNING nova.compute.manager [req-9fb298be-1a86-4e92-9797-40f36ecfeb33 req-c794bb9a-1caa-4f2d-9e6c-322a613840e5 service nova] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Received unexpected event network-vif-plugged-a44f0318-04ca-4f39-ba6e-11403747065e for instance with vm_state building and task_state spawning. [ 651.583778] env[61962]: DEBUG nova.network.neutron [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Successfully updated port: a44f0318-04ca-4f39-ba6e-11403747065e {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.598993] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquiring lock "refresh_cache-6996289c-f881-4733-8943-98e3633e43ca" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.599165] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquired lock "refresh_cache-6996289c-f881-4733-8943-98e3633e43ca" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.599314] env[61962]: DEBUG nova.network.neutron [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 651.665161] env[61962]: DEBUG nova.network.neutron [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.940577] env[61962]: DEBUG nova.network.neutron [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Updating instance_info_cache with network_info: [{"id": "a44f0318-04ca-4f39-ba6e-11403747065e", "address": "fa:16:3e:d7:99:3f", "network": {"id": "d75c8968-9118-4042-be7f-dd8c7f281e4d", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1871292718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4cf0a2155644882b4bf42e69e210138", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa44f0318-04", "ovs_interfaceid": "a44f0318-04ca-4f39-ba6e-11403747065e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.957020] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Releasing lock "refresh_cache-6996289c-f881-4733-8943-98e3633e43ca" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.957365] env[61962]: DEBUG nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Instance network_info: |[{"id": "a44f0318-04ca-4f39-ba6e-11403747065e", "address": "fa:16:3e:d7:99:3f", "network": {"id": "d75c8968-9118-4042-be7f-dd8c7f281e4d", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1871292718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4cf0a2155644882b4bf42e69e210138", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa44f0318-04", "ovs_interfaceid": "a44f0318-04ca-4f39-ba6e-11403747065e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 651.957793] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:99:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56398cc0-e39f-410f-8036-8c2a6870e26f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a44f0318-04ca-4f39-ba6e-11403747065e', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 651.966198] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Creating folder: Project (b4cf0a2155644882b4bf42e69e210138). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 651.966673] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a85a136-3334-4be4-9f1e-062338204fd6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.978519] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Created folder: Project (b4cf0a2155644882b4bf42e69e210138) in parent group-v953327. [ 651.978736] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Creating folder: Instances. Parent ref: group-v953365. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 651.978982] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e623c9bc-57aa-4656-86df-2c696e53804d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.990258] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Created folder: Instances in parent group-v953365. [ 651.990517] env[61962]: DEBUG oslo.service.loopingcall [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 651.990716] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 651.994021] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cedb0a0-a52e-4ab4-b108-ebd7f5e5d13c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.013906] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.013906] env[61962]: value = "task-4891951" [ 652.013906] env[61962]: _type = "Task" [ 652.013906] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.022732] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891951, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.530618] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891951, 'name': CreateVM_Task} progress is 99%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.028186] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891951, 'name': CreateVM_Task} progress is 99%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.529236] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891951, 'name': CreateVM_Task, 'duration_secs': 1.385456} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.529236] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 653.529236] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.529236] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.529236] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 653.529395] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-308a1bbb-7327-4000-87a1-98d8007a334c {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.535254] env[61962]: DEBUG oslo_vmware.api [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Waiting for the task: (returnval){ [ 653.535254] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52db1f1a-9d32-aeae-7552-4de042b04d98" [ 653.535254] env[61962]: _type = "Task" [ 653.535254] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.545567] env[61962]: DEBUG oslo_vmware.api [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52db1f1a-9d32-aeae-7552-4de042b04d98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.815312] env[61962]: DEBUG nova.compute.manager [req-ca9be841-4216-49ed-8cae-3c1664884414 req-05220aec-aad6-47ff-8c62-2ed80149b5cd service nova] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Received event network-changed-a44f0318-04ca-4f39-ba6e-11403747065e {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 653.815396] env[61962]: DEBUG nova.compute.manager [req-ca9be841-4216-49ed-8cae-3c1664884414 req-05220aec-aad6-47ff-8c62-2ed80149b5cd service nova] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Refreshing instance network info cache due to event network-changed-a44f0318-04ca-4f39-ba6e-11403747065e. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 653.815841] env[61962]: DEBUG oslo_concurrency.lockutils [req-ca9be841-4216-49ed-8cae-3c1664884414 req-05220aec-aad6-47ff-8c62-2ed80149b5cd service nova] Acquiring lock "refresh_cache-6996289c-f881-4733-8943-98e3633e43ca" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.815841] env[61962]: DEBUG oslo_concurrency.lockutils [req-ca9be841-4216-49ed-8cae-3c1664884414 req-05220aec-aad6-47ff-8c62-2ed80149b5cd service nova] Acquired lock "refresh_cache-6996289c-f881-4733-8943-98e3633e43ca" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.817043] env[61962]: DEBUG nova.network.neutron [req-ca9be841-4216-49ed-8cae-3c1664884414 req-05220aec-aad6-47ff-8c62-2ed80149b5cd service nova] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Refreshing network info cache for port a44f0318-04ca-4f39-ba6e-11403747065e {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 654.046806] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.047208] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Processing image 
f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.047208] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.229250] env[61962]: DEBUG nova.network.neutron [req-ca9be841-4216-49ed-8cae-3c1664884414 req-05220aec-aad6-47ff-8c62-2ed80149b5cd service nova] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Updated VIF entry in instance network info cache for port a44f0318-04ca-4f39-ba6e-11403747065e. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 654.229615] env[61962]: DEBUG nova.network.neutron [req-ca9be841-4216-49ed-8cae-3c1664884414 req-05220aec-aad6-47ff-8c62-2ed80149b5cd service nova] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Updating instance_info_cache with network_info: [{"id": "a44f0318-04ca-4f39-ba6e-11403747065e", "address": "fa:16:3e:d7:99:3f", "network": {"id": "d75c8968-9118-4042-be7f-dd8c7f281e4d", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1871292718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4cf0a2155644882b4bf42e69e210138", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa44f0318-04", "ovs_interfaceid": "a44f0318-04ca-4f39-ba6e-11403747065e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.247714] env[61962]: DEBUG oslo_concurrency.lockutils [req-ca9be841-4216-49ed-8cae-3c1664884414 req-05220aec-aad6-47ff-8c62-2ed80149b5cd service nova] Releasing lock "refresh_cache-6996289c-f881-4733-8943-98e3633e43ca" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.251850] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Acquiring lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.252246] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd"
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.158949] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.153855] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.158464] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.158553] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.158691] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 680.158842] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.171461] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.171733] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.171878] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.172052] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 680.173196] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b8b075-d260-410e-ad92-3703dd5ec5fd {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.182451] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86129849-7a10-4edf-a9ad-41a20f80b1f2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.197870] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3edfe6f4-84a1-40e2-8bb2-b60728b2128c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.205155] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba625d5-3a4f-4ca9-aa96-6bf1a072f01c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.235857] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180554MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 680.235963] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.236154] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.320410] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance dc6c9cb6-419f-4122-8351-0c517fb8616d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 680.320410] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53921884-f617-4020-b779-650becf89a58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 680.320410] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6ba9719c-3689-4db6-8ab2-441dc52c7bfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 680.320410] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 680.320618] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d0df932e-822b-4c60-a588-445aa2f14cdb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 680.320618] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a6e6f30d-c667-4f70-bf60-596a8fe97188 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 680.320618] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b9782756-27e1-46fc-940f-069cfb9ab9f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 680.320724] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 667f56c0-896f-4e25-9e8a-3954f7733dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 680.320773] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 295064f0-9677-4287-877a-ef2a33a87fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 680.320887] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6996289c-f881-4733-8943-98e3633e43ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 680.334831] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.346559] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.358854] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 968692bd-12ae-46aa-b1de-5cd312f436ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.373711] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 09098197-59d7-4e14-b76c-8366a1ea5fd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.386041] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4fcdc4fd-b1f8-4188-a502-49cf675c9bdd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.397550] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d6fc81f2-0fa6-4e9c-8d1b-60f0038112b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.409438] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 511ce01a-0bea-49ba-b0e3-2a006ee88d32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.421025] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b241f341-8413-4f6f-b054-d9209640d652 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.431443] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance cfc08011-0291-450a-96eb-c4f8002e07e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.443293] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a20846de-721a-4bb9-9826-e7c7ebf170c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.454362] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.467307] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2dad2cb0-56e2-4fb6-b540-0fd98265697f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.478199] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8436449e-8f8d-4fd0-96be-31f1464f03ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.489088] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 92f66377-ddec-4312-b10c-f7edc4855d48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.500116] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 17bc2689-2250-48bc-bc0e-47821b20d3ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.511464] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 146b08c3-3893-4c0e-9448-05a988c19ae8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.522235] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2ef4b06a-c5c6-46ed-89d8-30143aa30a25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.532508] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2142b6e8-d060-48eb-8ff1-3e92e6188a83 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.543699] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance dbe5937a-cf98-4abc-ad5a-521762e0be9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.555019] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b387b903-2f5c-40ad-aeea-b013fb66e17d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.564700] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 680.564974] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 680.565148] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '2', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_c7a5eef5ba8d4ba88356ffba87aec75c': '1', 'io_workload': '10', 'num_proj_f1aff07cb3e74298b4e347bd4176fbf8': '1', 'num_proj_1dd984fe1bf84471ae103d28565d160b': '1', 'num_proj_13cf9ead87c749fc963fbe35b22b4fd6': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_6e7e747becdf4976b570b79d4b20d88b': '1', 'num_proj_a7b748ae1e6d4e17bc463811008c5596': '1', 'num_proj_6c00b1a76e464ad2a9788673e256de22': '1', 'num_proj_78e44aa9cd5642928627210447b3e177': '1', 'num_proj_b4cf0a2155644882b4bf42e69e210138': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 680.943635] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a75fda-a8b9-4e2b-b1d9-5fecd8bc8eb0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.952032] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0add3ab-eab7-47ca-89dc-8d770827bc22 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.983062] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f380d04e-8994-418c-8285-1ffb8df63512 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.991086] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74bd283-9ba6-4ace-b81a-41630f1d3dcc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.005154] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.013744] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 681.029108] env[61962]: DEBUG nova.compute.resource_tracker [None 
req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 681.029312] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.793s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.029794] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.031246] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 682.031246] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 682.052459] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53921884-f617-4020-b779-650becf89a58] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 682.052671] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 682.052822] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 682.052980] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 682.053123] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 682.053248] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 682.053370] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 682.053489] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 682.053608] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 682.053727] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 682.053849] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 682.054346] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.158229] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.158229] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 693.191816] env[61962]: WARNING oslo_vmware.rw_handles [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 693.191816] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 693.191816] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 693.191816] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 693.191816] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 693.191816] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 693.191816] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 693.191816] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 693.191816] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 693.191816] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 693.191816] env[61962]: ERROR 
oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 693.191816] env[61962]: ERROR oslo_vmware.rw_handles [ 693.192766] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/1a7f168f-47fe-419c-9b06-3e30ff328019/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 693.194037] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 693.194325] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Copying Virtual Disk [datastore2] vmware_temp/1a7f168f-47fe-419c-9b06-3e30ff328019/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/1a7f168f-47fe-419c-9b06-3e30ff328019/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 693.194629] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b36bf89-34e0-447c-b05e-d0ad1097c6f0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.204768] env[61962]: DEBUG oslo_vmware.api [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Waiting for the task: (returnval){ [ 693.204768] env[61962]: value = "task-4891952" [ 693.204768] env[61962]: _type = "Task" [ 693.204768] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.215311] env[61962]: DEBUG oslo_vmware.api [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Task: {'id': task-4891952, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.715350] env[61962]: DEBUG oslo_vmware.exceptions [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 693.715637] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.716223] env[61962]: ERROR nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 693.716223] env[61962]: Faults: ['InvalidArgument'] [ 693.716223] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] Traceback (most recent call last): [ 693.716223] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 693.716223] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] yield resources [ 693.716223] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 693.716223] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] self.driver.spawn(context, instance, image_meta, [ 693.716223] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 693.716223] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] self._vmops.spawn(context, instance, image_meta, injected_files, [ 693.716223] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 693.716223] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] self._fetch_image_if_missing(context, vi) [ 693.716223] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] image_cache(vi, tmp_image_ds_loc) [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] vm_util.copy_virtual_disk( [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] session._wait_for_task(vmdk_copy_task) [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] return self.wait_for_task(task_ref) [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] return evt.wait() [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] result = hub.switch() [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 693.716552] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] return self.greenlet.switch() [ 693.717158] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 693.717158] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] self.f(*self.args, **self.kw) [ 693.717158] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 693.717158] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] raise exceptions.translate_fault(task_info.error) [ 693.717158] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 693.717158] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] Faults: ['InvalidArgument'] [ 693.717158] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] [ 693.717158] env[61962]: INFO nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Terminating instance [ 693.718124] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.718333] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.718584] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6b43623-a45c-47bc-bd06-d74a46022bfa 
{{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.721102] env[61962]: DEBUG nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 693.721363] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 693.722138] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03050f5-4e6d-48cc-9996-dbff606d983b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.729889] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 693.730154] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0dc2fa40-d192-41f7-9221-ae7691c8f45e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.732486] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.732691] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 693.733691] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dc1775c-8de8-4868-b34e-342396a9c939 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.739092] env[61962]: DEBUG oslo_vmware.api [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Waiting for the task: (returnval){ [ 693.739092] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]527a1162-91eb-e993-579b-399adc2190fd" [ 693.739092] env[61962]: _type = "Task" [ 693.739092] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.756073] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 693.756073] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Creating directory with path [datastore2] vmware_temp/22e5db54-218f-4e68-b00b-b94ed365a30f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.756073] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92700b9b-5673-4eec-9f41-76c0356126ea {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.769399] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Created directory with path [datastore2] vmware_temp/22e5db54-218f-4e68-b00b-b94ed365a30f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.769590] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Fetch image to [datastore2] vmware_temp/22e5db54-218f-4e68-b00b-b94ed365a30f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 693.769769] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/22e5db54-218f-4e68-b00b-b94ed365a30f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 693.770592] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1c2f79-ab11-4f9f-a240-1896f83c6978 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.778332] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8722ff-45f9-42d5-a73e-f5fe752434b3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.787832] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b835c7-dfcf-4306-9ac3-a30796075847 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.819170] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8248bae0-0608-44d6-93b2-b3c26ed4aab0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.825498] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f8864332-dc48-4c3a-9d16-0d047e73f779 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.847868] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 693.901895] env[61962]: DEBUG oslo_vmware.rw_handles [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/22e5db54-218f-4e68-b00b-b94ed365a30f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 693.962613] env[61962]: DEBUG oslo_vmware.rw_handles [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 693.962754] env[61962]: DEBUG oslo_vmware.rw_handles [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/22e5db54-218f-4e68-b00b-b94ed365a30f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 693.966995] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 693.966995] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 693.966995] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Deleting the datastore file [datastore2] 53921884-f617-4020-b779-650becf89a58 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 693.967310] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5a68276-c7fc-4bcf-9b0c-0ac9913fa3ba {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.973988] env[61962]: DEBUG oslo_vmware.api [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Waiting for the task: (returnval){ [ 693.973988] env[61962]: value = "task-4891954" [ 693.973988] env[61962]: _type = "Task" [ 693.973988] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.983526] env[61962]: DEBUG oslo_vmware.api [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Task: {'id': task-4891954, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.485123] env[61962]: DEBUG oslo_vmware.api [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Task: {'id': task-4891954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08334} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.485510] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 694.485769] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 694.485996] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 694.487569] env[61962]: INFO nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Took 0.76 seconds to destroy the instance on the hypervisor. [ 694.489850] env[61962]: DEBUG nova.compute.claims [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 694.490090] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.490357] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.935264] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9902963-f9df-4888-9f13-c387ab268cec {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.943859] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2535e2b6-d6a6-4e0f-8508-08140622fe60 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.974628] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bdafc6-6b7f-475c-89d2-4fa813c6376b {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.983012] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f64c8c7-1376-47db-be9f-d516b7669736 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.997093] env[61962]: DEBUG nova.compute.provider_tree [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.006294] env[61962]: DEBUG nova.scheduler.client.report [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 695.022696] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.532s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.023665] env[61962]: ERROR nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 695.023665] env[61962]: Faults: ['InvalidArgument'] [ 695.023665] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] Traceback (most recent call last): [ 695.023665] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 695.023665] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] self.driver.spawn(context, instance, image_meta, [ 695.023665] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 695.023665] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] self._vmops.spawn(context, instance, image_meta, injected_files, [ 695.023665] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 695.023665] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] self._fetch_image_if_missing(context, vi) [ 695.023665] 
env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 695.023665] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] image_cache(vi, tmp_image_ds_loc) [ 695.023665] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] vm_util.copy_virtual_disk( [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] session._wait_for_task(vmdk_copy_task) [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] return self.wait_for_task(task_ref) [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] return evt.wait() [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] result = hub.switch() [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] return self.greenlet.switch() [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 695.024177] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] self.f(*self.args, **self.kw) [ 695.024545] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 695.024545] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] raise exceptions.translate_fault(task_info.error) [ 695.024545] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 695.024545] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] Faults: ['InvalidArgument'] [ 695.024545] env[61962]: ERROR nova.compute.manager [instance: 53921884-f617-4020-b779-650becf89a58] [ 695.024545] env[61962]: DEBUG nova.compute.utils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 
tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 695.025726] env[61962]: DEBUG nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Build of instance 53921884-f617-4020-b779-650becf89a58 was re-scheduled: A specified parameter was not correct: fileType [ 695.025726] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 695.026144] env[61962]: DEBUG nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 695.026322] env[61962]: DEBUG nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 695.026488] env[61962]: DEBUG nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 695.026649] env[61962]: DEBUG nova.network.neutron [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 695.387970] env[61962]: DEBUG nova.network.neutron [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.402829] env[61962]: INFO nova.compute.manager [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] [instance: 53921884-f617-4020-b779-650becf89a58] Took 0.37 seconds to deallocate network for instance. 
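The InvalidArgument failure above surfaces through oslo.vmware's task-polling machinery: wait_for_task() drives a looping call around _poll_task, and when task_info reports an error it raises the translated fault (here a VimFaultException with Faults: ['InvalidArgument']). A minimal sketch, assuming a standalone script with placeholder vCenter credentials, a hypothetical datacenter moref, and illustrative datastore paths (none of these values are taken from this deployment), of issuing the same CopyVirtualDisk_Task and catching the fault:

    # Sketch only: drive a vCenter CopyVirtualDisk_Task through oslo.vmware
    # and surface task faults the way the traceback above does.
    from oslo_vmware import api, exceptions, vim_util

    # Placeholder endpoint/credentials, not values from this log.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical datacenter managed-object reference.
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')
    disk_mgr = session.vim.service_content.virtualDiskManager

    # Same call shape as nova.virt.vmwareapi.vm_util.copy_virtual_disk().
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore2] vmware_temp/example/example.vmdk')
    try:
        # Polls task_info until success or error, like _poll_task in the log.
        session.wait_for_task(task)
    except exceptions.VimFaultException as e:
        # fault_list carries the fault names, e.g. 'InvalidArgument'.
        print('copy failed:', e.fault_list, str(e))

Nova wraps this same pattern in session._wait_for_task(), which is why the traceback runs through oslo_vmware/api.py and loopingcall.py before the fault reaches _fetch_image_if_missing.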
[ 695.520221] env[61962]: INFO nova.scheduler.client.report [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Deleted allocations for instance 53921884-f617-4020-b779-650becf89a58 [ 695.548049] env[61962]: DEBUG oslo_concurrency.lockutils [None req-29af0b05-c0ff-41ef-8103-714fd2cae28f tempest-ServerDiagnosticsNegativeTest-1190275590 tempest-ServerDiagnosticsNegativeTest-1190275590-project-member] Lock "53921884-f617-4020-b779-650becf89a58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 147.384s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.561670] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 695.618445] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.618699] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.620231] env[61962]: INFO nova.compute.claims [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 696.062282] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e53afb-2973-49ba-8346-7b3f459b3153 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.070073] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72df4221-288c-4ece-a1ce-9e5303b8cb48 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.101882] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844da250-e097-4b3d-b7ea-7a9430ddf114 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.109996] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182e266c-8303-4c5b-989d-0e5db5a730ce {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.124451] env[61962]: DEBUG nova.compute.provider_tree [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 
tempest-MultipleCreateTestJSON-937113120-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.133101] env[61962]: DEBUG nova.scheduler.client.report [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 696.154740] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.536s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.155658] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 696.193744] env[61962]: DEBUG nova.compute.utils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 696.195500] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 696.195737] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 696.205540] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Start building block device mappings for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 696.263359] env[61962]: DEBUG nova.policy [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6183a3edafb34815b27a63da01d7ca7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed943c69c73d48e2b1b2bab311295d84', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 696.285342] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 696.324524] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 696.324835] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 696.325130] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.325364] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 696.325559] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 696.325755] env[61962]: DEBUG nova.virt.hardware [None 
req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 696.326075] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 696.326303] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 696.326586] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 696.326776] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 696.327277] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 696.328085] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8a88a2-c71e-4975-b93d-d1fc2f69fdd8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.338098] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9460128d-8a2c-4e79-878a-0dc36e149305 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.753537] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Successfully created port: 99dfe6b7-84fa-445e-829b-760e7f7303a6 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 697.995472] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Successfully updated port: 99dfe6b7-84fa-445e-829b-760e7f7303a6 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 698.011304] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 
tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "refresh_cache-85c6b6af-f1b5-47ca-8e02-61c3669ec02b" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.011445] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquired lock "refresh_cache-85c6b6af-f1b5-47ca-8e02-61c3669ec02b" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.011628] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 698.080979] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 698.141671] env[61962]: DEBUG nova.compute.manager [req-c20ae4ba-714a-4dc3-84f9-0eea43fa8ede req-332a896d-a4d2-4050-9405-6ee510f5a347 service nova] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Received event network-vif-plugged-99dfe6b7-84fa-445e-829b-760e7f7303a6 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 698.141993] env[61962]: DEBUG oslo_concurrency.lockutils [req-c20ae4ba-714a-4dc3-84f9-0eea43fa8ede req-332a896d-a4d2-4050-9405-6ee510f5a347 service nova] Acquiring lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.142265] env[61962]: DEBUG oslo_concurrency.lockutils [req-c20ae4ba-714a-4dc3-84f9-0eea43fa8ede req-332a896d-a4d2-4050-9405-6ee510f5a347 service nova] Lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.142454] env[61962]: DEBUG oslo_concurrency.lockutils [req-c20ae4ba-714a-4dc3-84f9-0eea43fa8ede req-332a896d-a4d2-4050-9405-6ee510f5a347 service nova] Lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.142670] env[61962]: DEBUG nova.compute.manager [req-c20ae4ba-714a-4dc3-84f9-0eea43fa8ede req-332a896d-a4d2-4050-9405-6ee510f5a347 service nova] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] No waiting events found dispatching network-vif-plugged-99dfe6b7-84fa-445e-829b-760e7f7303a6 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 698.142897] env[61962]: WARNING nova.compute.manager [req-c20ae4ba-714a-4dc3-84f9-0eea43fa8ede req-332a896d-a4d2-4050-9405-6ee510f5a347 service nova] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Received unexpected event 
network-vif-plugged-99dfe6b7-84fa-445e-829b-760e7f7303a6 for instance with vm_state building and task_state spawning. [ 698.505066] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Updating instance_info_cache with network_info: [{"id": "99dfe6b7-84fa-445e-829b-760e7f7303a6", "address": "fa:16:3e:61:fa:43", "network": {"id": "5b523553-6f39-4db6-96a6-a28671c489ae", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-294270932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed943c69c73d48e2b1b2bab311295d84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99dfe6b7-84", "ovs_interfaceid": "99dfe6b7-84fa-445e-829b-760e7f7303a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.518327] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Releasing lock "refresh_cache-85c6b6af-f1b5-47ca-8e02-61c3669ec02b" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.518629] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Instance network_info: |[{"id": "99dfe6b7-84fa-445e-829b-760e7f7303a6", "address": "fa:16:3e:61:fa:43", "network": {"id": "5b523553-6f39-4db6-96a6-a28671c489ae", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-294270932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed943c69c73d48e2b1b2bab311295d84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99dfe6b7-84", "ovs_interfaceid": "99dfe6b7-84fa-445e-829b-760e7f7303a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 698.519053] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:fa:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99dfe6b7-84fa-445e-829b-760e7f7303a6', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 698.532653] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Creating folder: Project (ed943c69c73d48e2b1b2bab311295d84). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 698.534144] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-462b7a6e-a9a3-4b49-865c-6ce21fb5c7c7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.546336] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Created folder: Project (ed943c69c73d48e2b1b2bab311295d84) in parent group-v953327. [ 698.546556] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Creating folder: Instances. Parent ref: group-v953368. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 698.546830] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-104e55fa-df5d-4b08-9844-38690048fe70 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.559034] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Created folder: Instances in parent group-v953368. [ 698.559289] env[61962]: DEBUG oslo.service.loopingcall [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 698.559486] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 698.559705] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3119717e-3866-42eb-bf47-9b979f2f5c45 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.581918] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 698.581918] env[61962]: value = "task-4891957" [ 698.581918] env[61962]: _type = "Task" [ 698.581918] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.591534] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891957, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.092834] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891957, 'name': CreateVM_Task, 'duration_secs': 0.427381} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.092834] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 699.093583] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.093785] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.094158] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 699.094446] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e128226-862d-4240-b102-d1424f4a5c2a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.100515] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for the task: (returnval){ [ 699.100515] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]521f593b-c556-5f52-f9ac-b80b63062423" [ 699.100515] env[61962]: _type = "Task" [ 699.100515] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.110826] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]521f593b-c556-5f52-f9ac-b80b63062423, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.613191] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.613424] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 699.613640] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.535161] env[61962]: DEBUG nova.compute.manager [req-ba59ba10-f30a-4717-ba59-dab4bf9a7d9a req-423765a5-bb76-40f7-b264-c43ee509a5e5 service nova] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Received event network-changed-99dfe6b7-84fa-445e-829b-760e7f7303a6 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 700.535439] env[61962]: DEBUG nova.compute.manager [req-ba59ba10-f30a-4717-ba59-dab4bf9a7d9a req-423765a5-bb76-40f7-b264-c43ee509a5e5 service nova] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Refreshing instance network info cache due to event network-changed-99dfe6b7-84fa-445e-829b-760e7f7303a6. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 700.535640] env[61962]: DEBUG oslo_concurrency.lockutils [req-ba59ba10-f30a-4717-ba59-dab4bf9a7d9a req-423765a5-bb76-40f7-b264-c43ee509a5e5 service nova] Acquiring lock "refresh_cache-85c6b6af-f1b5-47ca-8e02-61c3669ec02b" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.535716] env[61962]: DEBUG oslo_concurrency.lockutils [req-ba59ba10-f30a-4717-ba59-dab4bf9a7d9a req-423765a5-bb76-40f7-b264-c43ee509a5e5 service nova] Acquired lock "refresh_cache-85c6b6af-f1b5-47ca-8e02-61c3669ec02b" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.535929] env[61962]: DEBUG nova.network.neutron [req-ba59ba10-f30a-4717-ba59-dab4bf9a7d9a req-423765a5-bb76-40f7-b264-c43ee509a5e5 service nova] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Refreshing network info cache for port 99dfe6b7-84fa-445e-829b-760e7f7303a6 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 701.140323] env[61962]: DEBUG nova.network.neutron [req-ba59ba10-f30a-4717-ba59-dab4bf9a7d9a req-423765a5-bb76-40f7-b264-c43ee509a5e5 service nova] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Updated VIF entry in instance network info cache for port 99dfe6b7-84fa-445e-829b-760e7f7303a6. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 701.140667] env[61962]: DEBUG nova.network.neutron [req-ba59ba10-f30a-4717-ba59-dab4bf9a7d9a req-423765a5-bb76-40f7-b264-c43ee509a5e5 service nova] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Updating instance_info_cache with network_info: [{"id": "99dfe6b7-84fa-445e-829b-760e7f7303a6", "address": "fa:16:3e:61:fa:43", "network": {"id": "5b523553-6f39-4db6-96a6-a28671c489ae", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-294270932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed943c69c73d48e2b1b2bab311295d84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99dfe6b7-84", "ovs_interfaceid": "99dfe6b7-84fa-445e-829b-760e7f7303a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.150516] env[61962]: DEBUG oslo_concurrency.lockutils [req-ba59ba10-f30a-4717-ba59-dab4bf9a7d9a req-423765a5-bb76-40f7-b264-c43ee509a5e5 service nova] Releasing lock "refresh_cache-85c6b6af-f1b5-47ca-8e02-61c3669ec02b" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.214192] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquiring lock 
"1dcafe3b-989b-4290-be54-ceccc62a323f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.214192] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "1dcafe3b-989b-4290-be54-ceccc62a323f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.812945] env[61962]: DEBUG oslo_concurrency.lockutils [None req-af11b868-1c66-47f6-8ccb-134c532e2986 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquiring lock "3bbd6215-436e-4d30-9c07-b3f69002f3bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.812945] env[61962]: DEBUG oslo_concurrency.lockutils [None req-af11b868-1c66-47f6-8ccb-134c532e2986 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "3bbd6215-436e-4d30-9c07-b3f69002f3bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.154531] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.180289] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.158548] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.158860] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 740.158911] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.170787] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.170787] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.170985] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.172035] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 740.172381] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22ac91d-590e-49c2-aa67-30c8b0bb28e7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.182605] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb5b6fd-7aae-4b74-858b-a34cd9d6a7e0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.199479] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fc0d4b-3ede-43c2-a0e2-d04a7fe9ab66 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.207167] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc7cf53-5bd9-4984-89aa-37aa77a16872 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.240296] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180554MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 740.240503] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.241840] 
env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.317385] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance dc6c9cb6-419f-4122-8351-0c517fb8616d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 740.317534] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6ba9719c-3689-4db6-8ab2-441dc52c7bfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 740.317729] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 740.318031] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d0df932e-822b-4c60-a588-445aa2f14cdb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 740.318031] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a6e6f30d-c667-4f70-bf60-596a8fe97188 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 740.318142] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b9782756-27e1-46fc-940f-069cfb9ab9f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 740.318236] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 667f56c0-896f-4e25-9e8a-3954f7733dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 740.318332] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 295064f0-9677-4287-877a-ef2a33a87fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 740.318448] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6996289c-f881-4733-8943-98e3633e43ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 740.318560] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 740.331168] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.341756] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 968692bd-12ae-46aa-b1de-5cd312f436ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.352020] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 09098197-59d7-4e14-b76c-8366a1ea5fd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.362035] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4fcdc4fd-b1f8-4188-a502-49cf675c9bdd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.373368] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d6fc81f2-0fa6-4e9c-8d1b-60f0038112b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.385379] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 511ce01a-0bea-49ba-b0e3-2a006ee88d32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.395752] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b241f341-8413-4f6f-b054-d9209640d652 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.406559] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance cfc08011-0291-450a-96eb-c4f8002e07e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.416986] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a20846de-721a-4bb9-9826-e7c7ebf170c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.428405] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.439871] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2dad2cb0-56e2-4fb6-b540-0fd98265697f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.452020] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8436449e-8f8d-4fd0-96be-31f1464f03ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.462734] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 92f66377-ddec-4312-b10c-f7edc4855d48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.475675] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 17bc2689-2250-48bc-bc0e-47821b20d3ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.487490] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 146b08c3-3893-4c0e-9448-05a988c19ae8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.501624] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2ef4b06a-c5c6-46ed-89d8-30143aa30a25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.512745] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2142b6e8-d060-48eb-8ff1-3e92e6188a83 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.528640] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance dbe5937a-cf98-4abc-ad5a-521762e0be9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.544159] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b387b903-2f5c-40ad-aeea-b013fb66e17d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.555058] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.566602] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1dcafe3b-989b-4290-be54-ceccc62a323f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.578210] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3bbd6215-436e-4d30-9c07-b3f69002f3bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 740.578511] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 740.578706] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '3', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_f1aff07cb3e74298b4e347bd4176fbf8': '1', 'io_workload': '10', 'num_proj_1dd984fe1bf84471ae103d28565d160b': '1', 'num_proj_13cf9ead87c749fc963fbe35b22b4fd6': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_6e7e747becdf4976b570b79d4b20d88b': '1', 'num_proj_a7b748ae1e6d4e17bc463811008c5596': '1', 'num_proj_6c00b1a76e464ad2a9788673e256de22': '1', 'num_proj_78e44aa9cd5642928627210447b3e177': '1', 'num_proj_b4cf0a2155644882b4bf42e69e210138': '1', 'num_proj_ed943c69c73d48e2b1b2bab311295d84': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 740.987632] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d14304f-c49b-4c0d-b6d8-e0dd007befab {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.995937] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227e5529-8543-4cbd-a1b6-2599e47ee033 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.026872] env[61962]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3ac03c-29ae-4f51-b78d-c41a50bd693b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.035978] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbf353f-211b-453d-9616-b5aab5a1f333 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.049831] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.058867] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 741.072799] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 741.073034] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.832s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.779299] env[61962]: WARNING oslo_vmware.rw_handles [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 741.779299] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 741.779299] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 741.779299] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 741.779299] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 741.779299] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 741.779299] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 741.779299] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 741.779299] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 741.779299] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 741.779299] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 
741.779299] env[61962]: ERROR oslo_vmware.rw_handles [ 741.779827] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/22e5db54-218f-4e68-b00b-b94ed365a30f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 741.781484] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 741.781754] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Copying Virtual Disk [datastore2] vmware_temp/22e5db54-218f-4e68-b00b-b94ed365a30f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/22e5db54-218f-4e68-b00b-b94ed365a30f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 741.782062] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce91a860-8c14-4956-83fe-1028c0394247 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.791734] env[61962]: DEBUG oslo_vmware.api [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Waiting for the task: (returnval){ [ 741.791734] env[61962]: value = "task-4891958" [ 741.791734] env[61962]: _type = "Task" [ 741.791734] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.800989] env[61962]: DEBUG oslo_vmware.api [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Task: {'id': task-4891958, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.067985] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 742.158441] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 742.158621] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 742.158741] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 742.179296] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 742.179440] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 742.179574] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 742.179701] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 742.179824] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 742.179944] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 742.180098] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 742.180226] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 742.180343] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 742.180460] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 742.180580] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 742.181073] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 742.181246] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 742.302277] env[61962]: DEBUG oslo_vmware.exceptions [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 742.302462] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.303032] env[61962]: ERROR nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 742.303032] env[61962]: Faults: ['InvalidArgument'] [ 742.303032] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Traceback (most recent call last): [ 742.303032] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 742.303032] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] yield resources [ 742.303032] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 742.303032] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] self.driver.spawn(context, instance, image_meta, [ 742.303032] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 742.303032] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 742.303032] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 742.303032] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] self._fetch_image_if_missing(context, vi) [ 742.303032] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] image_cache(vi, tmp_image_ds_loc) [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] vm_util.copy_virtual_disk( [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] session._wait_for_task(vmdk_copy_task) [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] return self.wait_for_task(task_ref) [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] return evt.wait() [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] result = hub.switch() [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 742.303684] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] return self.greenlet.switch() [ 742.304089] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 742.304089] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] self.f(*self.args, **self.kw) [ 742.304089] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 742.304089] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] raise exceptions.translate_fault(task_info.error) [ 742.304089] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 742.304089] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Faults: ['InvalidArgument'] [ 742.304089] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] [ 742.304089] env[61962]: INFO nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Terminating instance [ 742.304944] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.305175] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 742.305833] env[61962]: DEBUG nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: 
dc6c9cb6-419f-4122-8351-0c517fb8616d] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 742.306022] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 742.306254] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2709aa14-e576-45b0-912b-a021c2fb65a6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.309046] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2cf451-42be-4f7f-b80a-f345b44ae724 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.316912] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 742.317199] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d77128c0-03a6-4eaa-a167-e548cc7e4f5e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.319927] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 742.320236] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 742.321156] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edd91bf9-90a3-4db2-8ec6-05cb0149a5fe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.326893] env[61962]: DEBUG oslo_vmware.api [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Waiting for the task: (returnval){ [ 742.326893] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]526236a0-2bad-69dc-8304-93ec3d7fa809" [ 742.326893] env[61962]: _type = "Task" [ 742.326893] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.339929] env[61962]: DEBUG oslo_vmware.api [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]526236a0-2bad-69dc-8304-93ec3d7fa809, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.399206] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 742.399935] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 742.400176] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Deleting the datastore file [datastore2] dc6c9cb6-419f-4122-8351-0c517fb8616d {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 742.400454] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce22fc5a-6366-40d3-a756-909e4fd4a738 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.408807] env[61962]: DEBUG oslo_vmware.api [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Waiting for the task: (returnval){ [ 742.408807] env[61962]: value = "task-4891960" [ 742.408807] env[61962]: _type = "Task" [ 742.408807] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.420368] env[61962]: DEBUG oslo_vmware.api [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Task: {'id': task-4891960, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.838105] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 742.838409] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Creating directory with path [datastore2] vmware_temp/d409585b-3eec-4468-8a8c-c762413e8161/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 742.838631] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73948ef2-383c-4824-b985-ec76cadfb808 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.850864] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Created directory with path [datastore2] vmware_temp/d409585b-3eec-4468-8a8c-c762413e8161/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 742.851075] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Fetch image to [datastore2] vmware_temp/d409585b-3eec-4468-8a8c-c762413e8161/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 742.851257] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/d409585b-3eec-4468-8a8c-c762413e8161/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 742.852288] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1e7f62-2bb1-4ab5-9215-336127b1b189 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.860050] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a26808f-0cb1-48dd-ab89-12c558afba6a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.869015] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2929ab-4872-49ed-9be1-709d8d702b3c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.901653] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-978571b7-debd-4dbd-9e49-2d3636c7f67c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.908264] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ecb990ea-1b31-43d0-9840-0735665b3d1b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.917849] env[61962]: DEBUG oslo_vmware.api [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Task: {'id': task-4891960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081956} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.918101] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 742.918288] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 742.918458] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 742.918627] env[61962]: INFO nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Took 0.61 seconds to destroy the instance on the hypervisor. 
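The entries throughout this trace repeatedly show oslo_vmware.api's task-polling pattern: wait_for_task blocks on a vCenter task reference (api.py:397), _poll_task logs "progress is 0%." while the task runs (api.py:434), logs duration_secs on success (api.py:444), and raises a translated fault on error (api.py:448), as the CopyVirtualDisk_Task InvalidArgument failure in this trace demonstrates. The following is a minimal, self-contained Python sketch of that loop; FakeTask, TaskInfo, poll_task_until_done, and poll_interval are hypothetical stand-ins for illustration, not oslo.vmware's actual classes or defaults.

# Illustrative sketch only: mimics the wait_for_task/_poll_task pattern seen in
# the oslo_vmware.api log entries (poll progress until 'success' or fault).
# FakeTask and TaskInfo are hypothetical stand-ins, not real oslo.vmware types.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str            # 'running', 'success', or 'error'
    progress: int         # percent complete, as logged ("progress is 0%.")
    error: str | None = None

class FakeTask:
    """Stands in for a vCenter task reference like task-4891957."""
    def __init__(self, duration_polls: int, fail_with: str | None = None):
        self._polls_left = duration_polls
        self._fail_with = fail_with

    def read_info(self) -> TaskInfo:
        # Each call simulates one RetrievePropertiesEx round trip.
        self._polls_left -= 1
        if self._polls_left > 0:
            return TaskInfo('running', progress=0)
        if self._fail_with:
            return TaskInfo('error', progress=0, error=self._fail_with)
        return TaskInfo('success', progress=100)

def poll_task_until_done(task: FakeTask, poll_interval: float = 0.5) -> TaskInfo:
    """Poll until the task leaves 'running', like _poll_task at api.py:434/444."""
    while True:
        info = task.read_info()
        if info.state == 'running':
            print(f"progress is {info.progress}%.")
            time.sleep(poll_interval)
            continue
        if info.state == 'error':
            # oslo.vmware raises exceptions.translate_fault(task_info.error)
            # at this point; a plain RuntimeError keeps the sketch standalone.
            raise RuntimeError(info.error)
        return info

if __name__ == '__main__':
    print(poll_task_until_done(FakeTask(duration_polls=3)))  # completes
    try:
        poll_task_until_done(FakeTask(2, fail_with='InvalidArgument: fileType'))
    except RuntimeError as exc:
        print('task failed:', exc)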
[ 742.920921] env[61962]: DEBUG nova.compute.claims [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 742.921102] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.921321] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.933936] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 742.993817] env[61962]: DEBUG oslo_vmware.rw_handles [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d409585b-3eec-4468-8a8c-c762413e8161/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 743.056423] env[61962]: DEBUG oslo_vmware.rw_handles [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 743.056720] env[61962]: DEBUG oslo_vmware.rw_handles [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d409585b-3eec-4468-8a8c-c762413e8161/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 743.160485] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 743.448253] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6010f27-c3bf-4d18-b389-370245076411 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.456438] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00fd7ea-29bd-4fe9-a8c3-e151b408df4a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.486533] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39804e6b-a403-419a-9fb9-228f2249c45c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.494259] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5d93c7-4b67-49cc-be24-34e43e0170c1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.508153] env[61962]: DEBUG nova.compute.provider_tree [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.517250] env[61962]: DEBUG nova.scheduler.client.report [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 743.536231] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.615s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.536861] env[61962]: ERROR nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 743.536861] env[61962]: Faults: ['InvalidArgument'] [ 743.536861] env[61962]: ERROR nova.compute.manager 
[instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Traceback (most recent call last): [ 743.536861] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 743.536861] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] self.driver.spawn(context, instance, image_meta, [ 743.536861] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 743.536861] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 743.536861] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 743.536861] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] self._fetch_image_if_missing(context, vi) [ 743.536861] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 743.536861] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] image_cache(vi, tmp_image_ds_loc) [ 743.536861] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] vm_util.copy_virtual_disk( [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] session._wait_for_task(vmdk_copy_task) [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] return self.wait_for_task(task_ref) [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] return evt.wait() [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] result = hub.switch() [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] return self.greenlet.switch() [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", 
line 75, in _inner [ 743.537198] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] self.f(*self.args, **self.kw) [ 743.537521] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 743.537521] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] raise exceptions.translate_fault(task_info.error) [ 743.537521] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 743.537521] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Faults: ['InvalidArgument'] [ 743.537521] env[61962]: ERROR nova.compute.manager [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] [ 743.537669] env[61962]: DEBUG nova.compute.utils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 743.539028] env[61962]: DEBUG nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Build of instance dc6c9cb6-419f-4122-8351-0c517fb8616d was re-scheduled: A specified parameter was not correct: fileType [ 743.539028] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 743.539393] env[61962]: DEBUG nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 743.539587] env[61962]: DEBUG nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 743.539709] env[61962]: DEBUG nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 743.539870] env[61962]: DEBUG nova.network.neutron [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 743.887891] env[61962]: DEBUG nova.network.neutron [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.902844] env[61962]: INFO nova.compute.manager [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] [instance: dc6c9cb6-419f-4122-8351-0c517fb8616d] Took 0.36 seconds to deallocate network for instance. [ 744.022307] env[61962]: INFO nova.scheduler.client.report [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Deleted allocations for instance dc6c9cb6-419f-4122-8351-0c517fb8616d [ 744.054517] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21d4b7db-8868-406d-878a-d4e21834a00a tempest-ServerExternalEventsTest-1541707263 tempest-ServerExternalEventsTest-1541707263-project-member] Lock "dc6c9cb6-419f-4122-8351-0c517fb8616d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 194.949s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.080851] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 744.132878] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.133133] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.134694] env[61962]: INFO nova.compute.claims [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.158481] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 744.599972] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c7925a-6018-43e5-9525-e3fe0bad0ac4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.608460] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df91e597-339a-4b1a-8337-c66c4958f934 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.639342] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1631af59-fd3c-49f9-b044-dc52297b785d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.647279] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdd4af6-145d-4a6c-a38c-b8feef1db27b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.661670] env[61962]: DEBUG nova.compute.provider_tree [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.670643] env[61962]: DEBUG nova.scheduler.client.report [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 744.687450] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.554s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.688016] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 744.724319] env[61962]: DEBUG nova.compute.utils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 744.726473] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 744.726590] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 744.735692] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 744.786590] env[61962]: DEBUG nova.policy [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6183a3edafb34815b27a63da01d7ca7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed943c69c73d48e2b1b2bab311295d84', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 744.807300] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 744.837969] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 744.838106] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 744.839012] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.839012] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 744.839012] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.839012] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 744.839012] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 744.839342] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 744.839342] env[61962]: DEBUG nova.virt.hardware [None 
req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 744.840023] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 744.840023] env[61962]: DEBUG nova.virt.hardware [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 744.840514] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8762397-30d6-4d0f-8b17-ff998d1466b8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.849409] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33b32ab-a614-4e0e-979b-bc8e04499c9f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.273157] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Successfully created port: 5670d6fe-d522-426a-af54-db2c0ce73bad {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 746.243426] env[61962]: DEBUG nova.compute.manager [req-ac830b9d-533b-4aff-8308-f148f13e3836 req-1bd4a2b2-e3dd-4a18-9c76-929b8c377c0b service nova] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Received event network-vif-plugged-5670d6fe-d522-426a-af54-db2c0ce73bad {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 746.243579] env[61962]: DEBUG oslo_concurrency.lockutils [req-ac830b9d-533b-4aff-8308-f148f13e3836 req-1bd4a2b2-e3dd-4a18-9c76-929b8c377c0b service nova] Acquiring lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.243784] env[61962]: DEBUG oslo_concurrency.lockutils [req-ac830b9d-533b-4aff-8308-f148f13e3836 req-1bd4a2b2-e3dd-4a18-9c76-929b8c377c0b service nova] Lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.243949] env[61962]: DEBUG oslo_concurrency.lockutils [req-ac830b9d-533b-4aff-8308-f148f13e3836 req-1bd4a2b2-e3dd-4a18-9c76-929b8c377c0b service nova] Lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.244135] env[61962]: DEBUG nova.compute.manager 
[req-ac830b9d-533b-4aff-8308-f148f13e3836 req-1bd4a2b2-e3dd-4a18-9c76-929b8c377c0b service nova] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] No waiting events found dispatching network-vif-plugged-5670d6fe-d522-426a-af54-db2c0ce73bad {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 746.244306] env[61962]: WARNING nova.compute.manager [req-ac830b9d-533b-4aff-8308-f148f13e3836 req-1bd4a2b2-e3dd-4a18-9c76-929b8c377c0b service nova] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Received unexpected event network-vif-plugged-5670d6fe-d522-426a-af54-db2c0ce73bad for instance with vm_state building and task_state spawning. [ 746.331550] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Successfully updated port: 5670d6fe-d522-426a-af54-db2c0ce73bad {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 746.359160] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "refresh_cache-0aa340f7-a219-47f5-9d7d-dd4062643cf7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.359316] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquired lock "refresh_cache-0aa340f7-a219-47f5-9d7d-dd4062643cf7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.359469] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 746.669470] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 746.956830] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Updating instance_info_cache with network_info: [{"id": "5670d6fe-d522-426a-af54-db2c0ce73bad", "address": "fa:16:3e:c0:0e:12", "network": {"id": "5b523553-6f39-4db6-96a6-a28671c489ae", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-294270932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed943c69c73d48e2b1b2bab311295d84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5670d6fe-d5", "ovs_interfaceid": "5670d6fe-d522-426a-af54-db2c0ce73bad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.975293] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Releasing lock "refresh_cache-0aa340f7-a219-47f5-9d7d-dd4062643cf7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.975767] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Instance network_info: |[{"id": "5670d6fe-d522-426a-af54-db2c0ce73bad", "address": "fa:16:3e:c0:0e:12", "network": {"id": "5b523553-6f39-4db6-96a6-a28671c489ae", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-294270932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed943c69c73d48e2b1b2bab311295d84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5670d6fe-d5", "ovs_interfaceid": "5670d6fe-d522-426a-af54-db2c0ce73bad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 746.976551] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:0e:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5670d6fe-d522-426a-af54-db2c0ce73bad', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 746.984794] env[61962]: DEBUG oslo.service.loopingcall [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 746.985227] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 746.985628] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28a522ce-7e72-40d8-80d8-c960fda9e25d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.007858] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.007858] env[61962]: value = "task-4891961" [ 747.007858] env[61962]: _type = "Task" [ 747.007858] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.017921] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891961, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.518742] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891961, 'name': CreateVM_Task, 'duration_secs': 0.323737} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.518988] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 747.519644] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.519851] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.520157] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 747.520403] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96afe8b1-ea4a-4522-a763-e8cad03c4b4e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.526986] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for the task: (returnval){ [ 747.526986] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52c4ceef-27b4-2262-c9b0-b5161bf3fafa" [ 747.526986] env[61962]: _type = "Task" [ 747.526986] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.536522] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52c4ceef-27b4-2262-c9b0-b5161bf3fafa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.037935] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.038716] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.038716] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.419782] env[61962]: DEBUG nova.compute.manager [req-89fea572-1a01-4ea8-9b82-6b5b91473fbb req-0bd38f7e-8bac-4e81-a969-ed9da5e56aa0 service nova] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Received event network-changed-5670d6fe-d522-426a-af54-db2c0ce73bad {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 748.419976] env[61962]: DEBUG nova.compute.manager [req-89fea572-1a01-4ea8-9b82-6b5b91473fbb req-0bd38f7e-8bac-4e81-a969-ed9da5e56aa0 service nova] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Refreshing instance network info cache due to event network-changed-5670d6fe-d522-426a-af54-db2c0ce73bad. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 748.420204] env[61962]: DEBUG oslo_concurrency.lockutils [req-89fea572-1a01-4ea8-9b82-6b5b91473fbb req-0bd38f7e-8bac-4e81-a969-ed9da5e56aa0 service nova] Acquiring lock "refresh_cache-0aa340f7-a219-47f5-9d7d-dd4062643cf7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.420329] env[61962]: DEBUG oslo_concurrency.lockutils [req-89fea572-1a01-4ea8-9b82-6b5b91473fbb req-0bd38f7e-8bac-4e81-a969-ed9da5e56aa0 service nova] Acquired lock "refresh_cache-0aa340f7-a219-47f5-9d7d-dd4062643cf7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.420490] env[61962]: DEBUG nova.network.neutron [req-89fea572-1a01-4ea8-9b82-6b5b91473fbb req-0bd38f7e-8bac-4e81-a969-ed9da5e56aa0 service nova] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Refreshing network info cache for port 5670d6fe-d522-426a-af54-db2c0ce73bad {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 748.814429] env[61962]: DEBUG nova.network.neutron [req-89fea572-1a01-4ea8-9b82-6b5b91473fbb req-0bd38f7e-8bac-4e81-a969-ed9da5e56aa0 service nova] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Updated VIF entry in instance network info cache for port 5670d6fe-d522-426a-af54-db2c0ce73bad. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 748.814761] env[61962]: DEBUG nova.network.neutron [req-89fea572-1a01-4ea8-9b82-6b5b91473fbb req-0bd38f7e-8bac-4e81-a969-ed9da5e56aa0 service nova] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Updating instance_info_cache with network_info: [{"id": "5670d6fe-d522-426a-af54-db2c0ce73bad", "address": "fa:16:3e:c0:0e:12", "network": {"id": "5b523553-6f39-4db6-96a6-a28671c489ae", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-294270932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed943c69c73d48e2b1b2bab311295d84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5670d6fe-d5", "ovs_interfaceid": "5670d6fe-d522-426a-af54-db2c0ce73bad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.824689] env[61962]: DEBUG oslo_concurrency.lockutils [req-89fea572-1a01-4ea8-9b82-6b5b91473fbb req-0bd38f7e-8bac-4e81-a969-ed9da5e56aa0 service nova] Releasing lock "refresh_cache-0aa340f7-a219-47f5-9d7d-dd4062643cf7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.530151] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquiring lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.414469] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquiring lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.576554] env[61962]: DEBUG oslo_concurrency.lockutils [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "d0df932e-822b-4c60-a588-445aa2f14cdb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.438468] env[61962]: DEBUG oslo_concurrency.lockutils [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquiring lock "a6e6f30d-c667-4f70-bf60-596a8fe97188" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.997682] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquiring lock "b9782756-27e1-46fc-940f-069cfb9ab9f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.700881] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquiring lock "667f56c0-896f-4e25-9e8a-3954f7733dc0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.145057] env[61962]: DEBUG oslo_concurrency.lockutils [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquiring lock "295064f0-9677-4287-877a-ef2a33a87fef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.713631] env[61962]: DEBUG oslo_concurrency.lockutils [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquiring lock "6996289c-f881-4733-8943-98e3633e43ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.514886] env[61962]: DEBUG oslo_concurrency.lockutils [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.941780] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.675983] env[61962]: WARNING oslo_vmware.rw_handles [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 791.675983] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 791.675983] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 791.675983] env[61962]: ERROR oslo_vmware.rw_handles 
self._conn.getresponse() [ 791.675983] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 791.675983] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 791.675983] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 791.675983] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 791.675983] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 791.675983] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 791.675983] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 791.675983] env[61962]: ERROR oslo_vmware.rw_handles [ 791.677214] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/d409585b-3eec-4468-8a8c-c762413e8161/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 791.678203] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 791.678475] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Copying Virtual Disk [datastore2] vmware_temp/d409585b-3eec-4468-8a8c-c762413e8161/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/d409585b-3eec-4468-8a8c-c762413e8161/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 791.678767] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0224ff88-b521-476b-b13d-13221194c914 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.687225] env[61962]: DEBUG oslo_vmware.api [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Waiting for the task: (returnval){ [ 791.687225] env[61962]: value = "task-4891962" [ 791.687225] env[61962]: _type = "Task" [ 791.687225] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.701902] env[61962]: DEBUG oslo_vmware.api [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Task: {'id': task-4891962, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.198105] env[61962]: DEBUG oslo_vmware.exceptions [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 792.198368] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.198916] env[61962]: ERROR nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 792.198916] env[61962]: Faults: ['InvalidArgument'] [ 792.198916] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Traceback (most recent call last): [ 792.198916] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 792.198916] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] yield resources [ 792.198916] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 792.198916] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] self.driver.spawn(context, instance, image_meta, [ 792.198916] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 792.198916] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 792.198916] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 792.198916] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] self._fetch_image_if_missing(context, vi) [ 792.198916] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] image_cache(vi, tmp_image_ds_loc) [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] vm_util.copy_virtual_disk( [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] session._wait_for_task(vmdk_copy_task) [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] return self.wait_for_task(task_ref) [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] return evt.wait() [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] result = hub.switch() [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 792.200335] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] return self.greenlet.switch() [ 792.204047] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 792.204047] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] self.f(*self.args, **self.kw) [ 792.204047] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 792.204047] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] raise exceptions.translate_fault(task_info.error) [ 792.204047] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 792.204047] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Faults: ['InvalidArgument'] [ 792.204047] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] [ 792.204047] env[61962]: INFO nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Terminating instance [ 792.204047] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.204420] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 
tempest-TenantUsagesTestJSON-1044362109-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 792.204420] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8580f8d3-54f1-44c4-8403-d545f328b272 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.204420] env[61962]: DEBUG nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 792.204574] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 792.205369] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e11d22-582b-4f69-880d-fae78d1e180e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.213865] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 792.215246] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ae637e9-4d50-4a7c-ab70-b6d27dc1bd83 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.217009] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 792.217205] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 792.217871] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-948741b4-149e-4358-96ed-fe4df740ba7b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.225212] env[61962]: DEBUG oslo_vmware.api [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Waiting for the task: (returnval){ [ 792.225212] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52ed964a-cd07-9680-497f-1adf9a6e422c" [ 792.225212] env[61962]: _type = "Task" [ 792.225212] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.234865] env[61962]: DEBUG oslo_vmware.api [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52ed964a-cd07-9680-497f-1adf9a6e422c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.290664] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 792.290816] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 792.290984] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Deleting the datastore file [datastore2] 6ba9719c-3689-4db6-8ab2-441dc52c7bfd {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 792.291278] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f625a77-7073-4aef-a742-f0a4b0072445 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.299854] env[61962]: DEBUG oslo_vmware.api [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Waiting for the task: (returnval){ [ 792.299854] env[61962]: value = "task-4891964" [ 792.299854] env[61962]: _type = "Task" [ 792.299854] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.309846] env[61962]: DEBUG oslo_vmware.api [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Task: {'id': task-4891964, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.736216] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 792.736577] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Creating directory with path [datastore2] vmware_temp/cf4d681e-fdf2-4685-b627-fc5b543c2292/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 792.736856] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cde4a010-5936-448d-af34-cfe38f2964bf {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.751341] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Created directory with path [datastore2] vmware_temp/cf4d681e-fdf2-4685-b627-fc5b543c2292/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 792.751602] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Fetch image to [datastore2] vmware_temp/cf4d681e-fdf2-4685-b627-fc5b543c2292/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 792.751798] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/cf4d681e-fdf2-4685-b627-fc5b543c2292/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 792.752619] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115a01d2-f3d6-4a29-9b29-16faca0683e3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.762275] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e170709f-cee2-461f-b1e0-6588e9cd8077 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.776688] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57b41ef-26c8-4738-9ced-f1b1d96ae249 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.821818] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e51f33-6352-489d-a4e1-a050b3445392 {{(pid=61962) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.832803] env[61962]: DEBUG oslo_vmware.api [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Task: {'id': task-4891964, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102739} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.833158] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fd86d300-8f99-4a30-bbd7-77fb595c9afe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.835706] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 792.839086] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 792.839086] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 792.839086] env[61962]: INFO nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 792.839499] env[61962]: DEBUG nova.compute.claims [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 792.839682] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.839896] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.868313] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 792.949359] env[61962]: DEBUG oslo_vmware.rw_handles [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf4d681e-fdf2-4685-b627-fc5b543c2292/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 793.026529] env[61962]: DEBUG oslo_vmware.rw_handles [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 793.026803] env[61962]: DEBUG oslo_vmware.rw_handles [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf4d681e-fdf2-4685-b627-fc5b543c2292/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 793.455095] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2da2ae-36bb-43e9-9c1d-ff4ddd1f8384 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.463535] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb0ab32-f41e-4ab4-822e-d9c6bd886ccb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.496971] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012343e3-cf23-4835-8f31-80767be1f737 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.504351] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba57a9f-d0a0-4a93-adb0-6fced04d5d85 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.518781] env[61962]: DEBUG nova.compute.provider_tree [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.532308] env[61962]: DEBUG nova.scheduler.client.report [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 793.554810] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.715s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.555364] env[61962]: ERROR nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 793.555364] env[61962]: Faults: ['InvalidArgument'] [ 793.555364] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Traceback (most recent call last): [ 793.555364] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 793.555364] env[61962]: ERROR 
nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] self.driver.spawn(context, instance, image_meta, [ 793.555364] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 793.555364] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 793.555364] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 793.555364] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] self._fetch_image_if_missing(context, vi) [ 793.555364] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 793.555364] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] image_cache(vi, tmp_image_ds_loc) [ 793.555364] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] vm_util.copy_virtual_disk( [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] session._wait_for_task(vmdk_copy_task) [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] return self.wait_for_task(task_ref) [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] return evt.wait() [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] result = hub.switch() [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] return self.greenlet.switch() [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 793.555750] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] self.f(*self.args, **self.kw) [ 793.556103] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 793.556103] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] raise exceptions.translate_fault(task_info.error) [ 793.556103] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 793.556103] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Faults: ['InvalidArgument'] [ 793.556103] env[61962]: ERROR nova.compute.manager [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] [ 793.556244] env[61962]: DEBUG nova.compute.utils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 793.558060] env[61962]: DEBUG nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Build of instance 6ba9719c-3689-4db6-8ab2-441dc52c7bfd was re-scheduled: A specified parameter was not correct: fileType [ 793.558060] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 793.558198] env[61962]: DEBUG nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 793.558297] env[61962]: DEBUG nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 793.558469] env[61962]: DEBUG nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 793.558629] env[61962]: DEBUG nova.network.neutron [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 794.710039] env[61962]: DEBUG nova.network.neutron [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.726185] env[61962]: INFO nova.compute.manager [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Took 1.17 seconds to deallocate network for instance. [ 794.855026] env[61962]: INFO nova.scheduler.client.report [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Deleted allocations for instance 6ba9719c-3689-4db6-8ab2-441dc52c7bfd [ 794.903016] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f081d78e-1cf7-4043-9a57-caa0299432de tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 244.410s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.904588] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 44.375s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.904798] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Acquiring lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.907600] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.907905] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.003s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.911668] env[61962]: INFO nova.compute.manager [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Terminating instance [ 794.913989] env[61962]: DEBUG nova.compute.manager [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 794.914176] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 794.914440] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51777862-6f6a-4b1f-8b84-785b7df5f181 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.924679] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f6000e-7f09-4e52-a1bf-267a292be802 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.939601] env[61962]: DEBUG nova.compute.manager [None req-b1726c72-0590-4b2f-925e-e9989081dc51 tempest-ServersAdminNegativeTestJSON-1331122422 tempest-ServersAdminNegativeTestJSON-1331122422-project-member] [instance: 968692bd-12ae-46aa-b1de-5cd312f436ee] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 794.971479] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6ba9719c-3689-4db6-8ab2-441dc52c7bfd could not be found. 
[ 794.971716] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 794.972232] env[61962]: INFO nova.compute.manager [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Took 0.06 seconds to destroy the instance on the hypervisor. [ 794.972290] env[61962]: DEBUG oslo.service.loopingcall [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 794.972565] env[61962]: DEBUG nova.compute.manager [-] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 794.972633] env[61962]: DEBUG nova.network.neutron [-] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 794.978040] env[61962]: DEBUG nova.compute.manager [None req-b1726c72-0590-4b2f-925e-e9989081dc51 tempest-ServersAdminNegativeTestJSON-1331122422 tempest-ServersAdminNegativeTestJSON-1331122422-project-member] [instance: 968692bd-12ae-46aa-b1de-5cd312f436ee] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 795.007568] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b1726c72-0590-4b2f-925e-e9989081dc51 tempest-ServersAdminNegativeTestJSON-1331122422 tempest-ServersAdminNegativeTestJSON-1331122422-project-member] Lock "968692bd-12ae-46aa-b1de-5cd312f436ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.581s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.019479] env[61962]: DEBUG nova.compute.manager [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] [instance: 09098197-59d7-4e14-b76c-8366a1ea5fd6] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 795.049701] env[61962]: DEBUG nova.compute.manager [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] [instance: 09098197-59d7-4e14-b76c-8366a1ea5fd6] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 795.076930] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] Lock "09098197-59d7-4e14-b76c-8366a1ea5fd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.420s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.090755] env[61962]: DEBUG nova.compute.manager [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] [instance: 4fcdc4fd-b1f8-4188-a502-49cf675c9bdd] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 795.116345] env[61962]: DEBUG nova.network.neutron [-] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.128802] env[61962]: DEBUG nova.compute.manager [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] [instance: 4fcdc4fd-b1f8-4188-a502-49cf675c9bdd] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 795.133889] env[61962]: INFO nova.compute.manager [-] [instance: 6ba9719c-3689-4db6-8ab2-441dc52c7bfd] Took 0.16 seconds to deallocate network for instance. [ 795.166124] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] Lock "4fcdc4fd-b1f8-4188-a502-49cf675c9bdd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.477s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.180911] env[61962]: DEBUG nova.compute.manager [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] [instance: d6fc81f2-0fa6-4e9c-8d1b-60f0038112b0] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 795.219165] env[61962]: DEBUG nova.compute.manager [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] [instance: d6fc81f2-0fa6-4e9c-8d1b-60f0038112b0] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 795.268100] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74ed5e63-44fa-4df0-9126-61e8698d9b18 tempest-ListServersNegativeTestJSON-662338656 tempest-ListServersNegativeTestJSON-662338656-project-member] Lock "d6fc81f2-0fa6-4e9c-8d1b-60f0038112b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.548s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.296028] env[61962]: DEBUG nova.compute.manager [None req-61235e9d-5a5f-4393-bfa4-a1ae88f788e2 tempest-ServersWithSpecificFlavorTestJSON-2100413347 tempest-ServersWithSpecificFlavorTestJSON-2100413347-project-member] [instance: 511ce01a-0bea-49ba-b0e3-2a006ee88d32] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 795.391666] env[61962]: DEBUG nova.compute.manager [None req-61235e9d-5a5f-4393-bfa4-a1ae88f788e2 tempest-ServersWithSpecificFlavorTestJSON-2100413347 tempest-ServersWithSpecificFlavorTestJSON-2100413347-project-member] [instance: 511ce01a-0bea-49ba-b0e3-2a006ee88d32] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 795.444209] env[61962]: DEBUG oslo_concurrency.lockutils [None req-61235e9d-5a5f-4393-bfa4-a1ae88f788e2 tempest-ServersWithSpecificFlavorTestJSON-2100413347 tempest-ServersWithSpecificFlavorTestJSON-2100413347-project-member] Lock "511ce01a-0bea-49ba-b0e3-2a006ee88d32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 211.558s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.446886] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6558e0f3-49b6-441c-bbcf-6cbf54407ba8 tempest-ImagesOneServerTestJSON-2032343034 tempest-ImagesOneServerTestJSON-2032343034-project-member] Lock "6ba9719c-3689-4db6-8ab2-441dc52c7bfd" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.542s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.457860] env[61962]: DEBUG nova.compute.manager [None req-99a9b1b0-e416-4a91-80e7-ba6a6163607b tempest-ImagesOneServerNegativeTestJSON-1618499339 tempest-ImagesOneServerNegativeTestJSON-1618499339-project-member] [instance: b241f341-8413-4f6f-b054-d9209640d652] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 795.498339] env[61962]: DEBUG nova.compute.manager [None req-99a9b1b0-e416-4a91-80e7-ba6a6163607b tempest-ImagesOneServerNegativeTestJSON-1618499339 tempest-ImagesOneServerNegativeTestJSON-1618499339-project-member] [instance: b241f341-8413-4f6f-b054-d9209640d652] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 795.537157] env[61962]: DEBUG oslo_concurrency.lockutils [None req-99a9b1b0-e416-4a91-80e7-ba6a6163607b tempest-ImagesOneServerNegativeTestJSON-1618499339 tempest-ImagesOneServerNegativeTestJSON-1618499339-project-member] Lock "b241f341-8413-4f6f-b054-d9209640d652" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 210.026s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.551406] env[61962]: DEBUG nova.compute.manager [None req-9646297f-687a-455b-8b65-c7b7ac20c2d1 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: cfc08011-0291-450a-96eb-c4f8002e07e5] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 795.597287] env[61962]: DEBUG nova.compute.manager [None req-9646297f-687a-455b-8b65-c7b7ac20c2d1 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: cfc08011-0291-450a-96eb-c4f8002e07e5] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 795.632421] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9646297f-687a-455b-8b65-c7b7ac20c2d1 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "cfc08011-0291-450a-96eb-c4f8002e07e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 204.678s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.642991] env[61962]: DEBUG nova.compute.manager [None req-5534e9bf-049c-4fb6-a71d-c79308fe60f6 tempest-ImagesNegativeTestJSON-192701559 tempest-ImagesNegativeTestJSON-192701559-project-member] [instance: a20846de-721a-4bb9-9826-e7c7ebf170c4] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 795.671518] env[61962]: DEBUG nova.compute.manager [None req-5534e9bf-049c-4fb6-a71d-c79308fe60f6 tempest-ImagesNegativeTestJSON-192701559 tempest-ImagesNegativeTestJSON-192701559-project-member] [instance: a20846de-721a-4bb9-9826-e7c7ebf170c4] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 795.705651] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5534e9bf-049c-4fb6-a71d-c79308fe60f6 tempest-ImagesNegativeTestJSON-192701559 tempest-ImagesNegativeTestJSON-192701559-project-member] Lock "a20846de-721a-4bb9-9826-e7c7ebf170c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 202.651s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.717727] env[61962]: DEBUG nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 795.791377] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.791377] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.791377] env[61962]: INFO nova.compute.claims [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 796.262666] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4862b645-cc5f-4fe3-a05e-39652142d257 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.273553] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04144872-e13c-4fc0-8f7f-738c26ea0aa2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.315110] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb666e3c-a798-4c18-9173-8e77f9816480 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.323384] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f6daf9-fddc-451c-9b93-d6ca421ed3ae {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.343932] env[61962]: DEBUG nova.compute.provider_tree [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.361532] env[61962]: DEBUG nova.scheduler.client.report [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 796.387786] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc 
tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.598s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.388631] env[61962]: DEBUG nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 796.473881] env[61962]: DEBUG nova.compute.utils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 796.477086] env[61962]: DEBUG nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 796.477276] env[61962]: DEBUG nova.network.neutron [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 796.497063] env[61962]: DEBUG nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 796.607869] env[61962]: DEBUG nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 796.626652] env[61962]: DEBUG nova.policy [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36c2e4a60886470daa9fdc82fe85b591', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5481169540de4205a1ff7731132eb3a7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 796.638858] env[61962]: DEBUG nova.virt.hardware [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 796.639119] env[61962]: DEBUG nova.virt.hardware [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 796.639310] env[61962]: DEBUG nova.virt.hardware [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 796.639707] env[61962]: DEBUG nova.virt.hardware [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 796.639790] env[61962]: DEBUG nova.virt.hardware [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 796.639922] env[61962]: DEBUG nova.virt.hardware [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 796.640439] env[61962]: DEBUG nova.virt.hardware [None 
req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 796.640724] env[61962]: DEBUG nova.virt.hardware [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 796.641059] env[61962]: DEBUG nova.virt.hardware [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 796.641636] env[61962]: DEBUG nova.virt.hardware [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 796.641963] env[61962]: DEBUG nova.virt.hardware [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 796.643366] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46ae900-e3ab-4a65-9542-77c2794446b9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.656041] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd111650-0443-4081-a261-ebf02a1591f4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.176334] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "454eebcc-ea18-4046-ba2e-01de1c8a954a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.176746] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "454eebcc-ea18-4046-ba2e-01de1c8a954a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.970612] env[61962]: DEBUG nova.network.neutron [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Successfully created port: 10a845df-b691-4fd1-a910-4aa833120aaf {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
798.297973] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquiring lock "e76b5908-3bad-4c19-90e7-b15ca253ad80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.298407] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Lock "e76b5908-3bad-4c19-90e7-b15ca253ad80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.543936] env[61962]: DEBUG oslo_concurrency.lockutils [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquiring lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.057936] env[61962]: DEBUG nova.network.neutron [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Successfully updated port: 10a845df-b691-4fd1-a910-4aa833120aaf {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 799.098007] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquiring lock "refresh_cache-ff7d9da5-bc9a-41f2-a2e4-521d87b22117" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.098468] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquired lock "refresh_cache-ff7d9da5-bc9a-41f2-a2e4-521d87b22117" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.098636] env[61962]: DEBUG nova.network.neutron [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 799.160334] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.160538] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 799.175442] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] There are 0 
instances to clean {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 799.175660] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.175826] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances with incomplete migration {{(pid=61962) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 799.178715] env[61962]: DEBUG nova.network.neutron [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 799.190062] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.459908] env[61962]: DEBUG nova.network.neutron [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Updating instance_info_cache with network_info: [{"id": "10a845df-b691-4fd1-a910-4aa833120aaf", "address": "fa:16:3e:b3:52:4e", "network": {"id": "967e5a80-062b-45e7-8013-c8b48a29d2d6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-413686463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5481169540de4205a1ff7731132eb3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10a845df-b6", "ovs_interfaceid": "10a845df-b691-4fd1-a910-4aa833120aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.486065] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Releasing lock "refresh_cache-ff7d9da5-bc9a-41f2-a2e4-521d87b22117" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.486449] env[61962]: DEBUG nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Instance network_info: |[{"id": 
"10a845df-b691-4fd1-a910-4aa833120aaf", "address": "fa:16:3e:b3:52:4e", "network": {"id": "967e5a80-062b-45e7-8013-c8b48a29d2d6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-413686463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5481169540de4205a1ff7731132eb3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10a845df-b6", "ovs_interfaceid": "10a845df-b691-4fd1-a910-4aa833120aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 799.487149] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:52:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10a845df-b691-4fd1-a910-4aa833120aaf', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 799.495827] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Creating folder: Project (5481169540de4205a1ff7731132eb3a7). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 799.496046] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb790a27-4309-41dd-996d-20c80af8d7cc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.507145] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Created folder: Project (5481169540de4205a1ff7731132eb3a7) in parent group-v953327. [ 799.507378] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Creating folder: Instances. Parent ref: group-v953372. 
{{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 799.507584] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0d12a5f-5efc-4359-83ad-cb36a3c2da5e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.517305] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Created folder: Instances in parent group-v953372. [ 799.517511] env[61962]: DEBUG oslo.service.loopingcall [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 799.517677] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 799.517883] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4543f234-111f-4d79-acca-c39031bb0872 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.539528] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 799.539528] env[61962]: value = "task-4891967" [ 799.539528] env[61962]: _type = "Task" [ 799.539528] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.547283] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891967, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.842277] env[61962]: DEBUG nova.compute.manager [req-509a6bd7-9c3b-434a-9466-ae5a9ad99816 req-7bdd9672-6ac8-4ede-872f-91caccc4f8ee service nova] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Received event network-vif-plugged-10a845df-b691-4fd1-a910-4aa833120aaf {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 799.842277] env[61962]: DEBUG oslo_concurrency.lockutils [req-509a6bd7-9c3b-434a-9466-ae5a9ad99816 req-7bdd9672-6ac8-4ede-872f-91caccc4f8ee service nova] Acquiring lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.842277] env[61962]: DEBUG oslo_concurrency.lockutils [req-509a6bd7-9c3b-434a-9466-ae5a9ad99816 req-7bdd9672-6ac8-4ede-872f-91caccc4f8ee service nova] Lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.842277] env[61962]: DEBUG oslo_concurrency.lockutils [req-509a6bd7-9c3b-434a-9466-ae5a9ad99816 req-7bdd9672-6ac8-4ede-872f-91caccc4f8ee service nova] Lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.842457] env[61962]: DEBUG nova.compute.manager [req-509a6bd7-9c3b-434a-9466-ae5a9ad99816 req-7bdd9672-6ac8-4ede-872f-91caccc4f8ee service nova] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] No waiting events found dispatching network-vif-plugged-10a845df-b691-4fd1-a910-4aa833120aaf {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 799.842552] env[61962]: WARNING nova.compute.manager [req-509a6bd7-9c3b-434a-9466-ae5a9ad99816 req-7bdd9672-6ac8-4ede-872f-91caccc4f8ee service nova] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Received unexpected event network-vif-plugged-10a845df-b691-4fd1-a910-4aa833120aaf for instance with vm_state building and task_state deleting. [ 800.052125] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891967, 'name': CreateVM_Task, 'duration_secs': 0.398838} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.052405] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 800.054430] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.054594] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.054929] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 800.055221] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dc939da-8b84-4feb-90cb-07af6a6d6434 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.062316] env[61962]: DEBUG oslo_vmware.api [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Waiting for the task: (returnval){ [ 800.062316] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]525f40d9-5b95-30f7-22fc-87f2b9e69de7" [ 800.062316] env[61962]: _type = "Task" [ 800.062316] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.072884] env[61962]: DEBUG oslo_vmware.api [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]525f40d9-5b95-30f7-22fc-87f2b9e69de7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.199358] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.199531] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 800.199724] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.211318] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.211537] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.211715] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.211867] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 800.213483] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb05076b-5cc7-451d-b9bc-f2fc69580a8c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.223582] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87655eb8-6f2d-4cc6-a458-ce09d8e2f6ed {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.239852] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85b7aa2-7b7c-441b-8dd2-e109097b9c62 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.248975] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9ce999-55b2-4053-8bd3-6cbddccd9ebf {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.282225] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180570MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 800.282225] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.282333] 
env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.400728] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 800.400935] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d0df932e-822b-4c60-a588-445aa2f14cdb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 800.401111] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a6e6f30d-c667-4f70-bf60-596a8fe97188 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 800.401271] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b9782756-27e1-46fc-940f-069cfb9ab9f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 800.401513] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 667f56c0-896f-4e25-9e8a-3954f7733dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 800.401572] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 295064f0-9677-4287-877a-ef2a33a87fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 800.401732] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6996289c-f881-4733-8943-98e3633e43ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 800.401881] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 800.402067] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 800.402465] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 800.416864] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2dad2cb0-56e2-4fb6-b540-0fd98265697f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.428467] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8436449e-8f8d-4fd0-96be-31f1464f03ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.441948] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 92f66377-ddec-4312-b10c-f7edc4855d48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.453508] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 17bc2689-2250-48bc-bc0e-47821b20d3ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.467972] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 146b08c3-3893-4c0e-9448-05a988c19ae8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.479856] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2ef4b06a-c5c6-46ed-89d8-30143aa30a25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.496561] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2142b6e8-d060-48eb-8ff1-3e92e6188a83 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.507568] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance dbe5937a-cf98-4abc-ad5a-521762e0be9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.520955] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b387b903-2f5c-40ad-aeea-b013fb66e17d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.538683] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.553956] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1dcafe3b-989b-4290-be54-ceccc62a323f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.567439] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3bbd6215-436e-4d30-9c07-b3f69002f3bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.575036] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.575389] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 800.575564] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.582715] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.593616] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 800.593863] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 800.594076] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '13', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_13cf9ead87c749fc963fbe35b22b4fd6': '1', 'io_workload': '10', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_6e7e747becdf4976b570b79d4b20d88b': '1', 'num_proj_a7b748ae1e6d4e17bc463811008c5596': '1', 'num_proj_6c00b1a76e464ad2a9788673e256de22': '1', 'num_proj_78e44aa9cd5642928627210447b3e177': '1', 'num_proj_b4cf0a2155644882b4bf42e69e210138': '1', 'num_proj_ed943c69c73d48e2b1b2bab311295d84': '2', 'num_proj_5481169540de4205a1ff7731132eb3a7': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 800.964883] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d801d8e0-4b33-452d-8a48-fd8434cbac7b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.973805] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffd5dff-009c-4544-a901-ca12ec31f47a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.005726] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6837174-61c3-4653-ab40-c985875a7bde {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.014407] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd10698-fe10-484f-81e7-24a5def71d37 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.029159] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.042091] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 801.065118] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 801.065337] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.783s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.206660] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0175fd8f-fe26-4651-8a2d-2c4f726c7c6a tempest-ListImageFiltersTestJSON-1372204851 tempest-ListImageFiltersTestJSON-1372204851-project-member] Acquiring lock "c17b6733-edea-4db9-b937-c38585eb92bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.206919] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0175fd8f-fe26-4651-8a2d-2c4f726c7c6a tempest-ListImageFiltersTestJSON-1372204851 tempest-ListImageFiltersTestJSON-1372204851-project-member] Lock "c17b6733-edea-4db9-b937-c38585eb92bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.298175] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f7b0ed63-6aaa-45c6-9e72-e7a859bd8ce6 tempest-AttachInterfacesUnderV243Test-760923567 tempest-AttachInterfacesUnderV243Test-760923567-project-member] Acquiring lock "1228fef1-3837-4a1b-8c88-fc1e27af6b4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.298438] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f7b0ed63-6aaa-45c6-9e72-e7a859bd8ce6 tempest-AttachInterfacesUnderV243Test-760923567 tempest-AttachInterfacesUnderV243Test-760923567-project-member] Lock "1228fef1-3837-4a1b-8c88-fc1e27af6b4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.025971] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.153443] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.427295] env[61962]: DEBUG oslo_concurrency.lockutils [None req-26263581-648d-4e26-9469-10ed46e2edfa tempest-ListImageFiltersTestJSON-1372204851 tempest-ListImageFiltersTestJSON-1372204851-project-member] Acquiring lock "66eb1344-b692-4f1c-a5ac-b3518e350741" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.427604] env[61962]: 
DEBUG oslo_concurrency.lockutils [None req-26263581-648d-4e26-9469-10ed46e2edfa tempest-ListImageFiltersTestJSON-1372204851 tempest-ListImageFiltersTestJSON-1372204851-project-member] Lock "66eb1344-b692-4f1c-a5ac-b3518e350741" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.635514] env[61962]: DEBUG nova.compute.manager [req-6e790426-b7a0-4e39-8029-fdb16216dbe8 req-b1840a17-e765-4181-a3c2-1469de956aa5 service nova] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Received event network-changed-10a845df-b691-4fd1-a910-4aa833120aaf {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 802.635514] env[61962]: DEBUG nova.compute.manager [req-6e790426-b7a0-4e39-8029-fdb16216dbe8 req-b1840a17-e765-4181-a3c2-1469de956aa5 service nova] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Refreshing instance network info cache due to event network-changed-10a845df-b691-4fd1-a910-4aa833120aaf. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 802.635514] env[61962]: DEBUG oslo_concurrency.lockutils [req-6e790426-b7a0-4e39-8029-fdb16216dbe8 req-b1840a17-e765-4181-a3c2-1469de956aa5 service nova] Acquiring lock "refresh_cache-ff7d9da5-bc9a-41f2-a2e4-521d87b22117" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.635514] env[61962]: DEBUG oslo_concurrency.lockutils [req-6e790426-b7a0-4e39-8029-fdb16216dbe8 req-b1840a17-e765-4181-a3c2-1469de956aa5 service nova] Acquired lock "refresh_cache-ff7d9da5-bc9a-41f2-a2e4-521d87b22117" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.635514] env[61962]: DEBUG nova.network.neutron [req-6e790426-b7a0-4e39-8029-fdb16216dbe8 req-b1840a17-e765-4181-a3c2-1469de956aa5 service nova] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Refreshing network info cache for port 10a845df-b691-4fd1-a910-4aa833120aaf {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 803.158576] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.471031] env[61962]: DEBUG nova.network.neutron [req-6e790426-b7a0-4e39-8029-fdb16216dbe8 req-b1840a17-e765-4181-a3c2-1469de956aa5 service nova] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Updated VIF entry in instance network info cache for port 10a845df-b691-4fd1-a910-4aa833120aaf. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 803.471404] env[61962]: DEBUG nova.network.neutron [req-6e790426-b7a0-4e39-8029-fdb16216dbe8 req-b1840a17-e765-4181-a3c2-1469de956aa5 service nova] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Updating instance_info_cache with network_info: [{"id": "10a845df-b691-4fd1-a910-4aa833120aaf", "address": "fa:16:3e:b3:52:4e", "network": {"id": "967e5a80-062b-45e7-8013-c8b48a29d2d6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-413686463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5481169540de4205a1ff7731132eb3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10a845df-b6", "ovs_interfaceid": "10a845df-b691-4fd1-a910-4aa833120aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.482967] env[61962]: DEBUG oslo_concurrency.lockutils [req-6e790426-b7a0-4e39-8029-fdb16216dbe8 req-b1840a17-e765-4181-a3c2-1469de956aa5 service nova] Releasing lock "refresh_cache-ff7d9da5-bc9a-41f2-a2e4-521d87b22117" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.159212] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.159487] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 804.159487] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 804.189158] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 804.189720] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 804.190059] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 804.190317] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 804.190926] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 804.191199] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 804.191433] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 804.191657] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 804.191897] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 804.192142] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 804.192362] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 804.193384] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 805.159312] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.158593] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.634577] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a2e8f290-e090-4273-9cff-81aa5b83e324 tempest-ServerShowV257Test-37650740 tempest-ServerShowV257Test-37650740-project-member] Acquiring lock "b33ecac7-cb2f-438c-9ee4-7cd53388e318" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.634577] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a2e8f290-e090-4273-9cff-81aa5b83e324 tempest-ServerShowV257Test-37650740 tempest-ServerShowV257Test-37650740-project-member] Lock "b33ecac7-cb2f-438c-9ee4-7cd53388e318" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.450054] env[61962]: DEBUG oslo_concurrency.lockutils [None req-253842ad-c195-4a95-92ed-04dcd3f5a28e tempest-SecurityGroupsTestJSON-26720826 tempest-SecurityGroupsTestJSON-26720826-project-member] Acquiring lock "252f4add-476d-46bc-b369-c8d66a512142" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.450324] env[61962]: DEBUG oslo_concurrency.lockutils [None req-253842ad-c195-4a95-92ed-04dcd3f5a28e tempest-SecurityGroupsTestJSON-26720826 tempest-SecurityGroupsTestJSON-26720826-project-member] Lock "252f4add-476d-46bc-b369-c8d66a512142" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.991649] env[61962]: DEBUG oslo_concurrency.lockutils [None req-286ae033-ae0a-4fe3-af73-4278631f960b tempest-ServerMetadataNegativeTestJSON-1558107376 tempest-ServerMetadataNegativeTestJSON-1558107376-project-member] Acquiring lock "647d141e-1aaf-4e5e-b0a3-31c9bee0adf7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.991969] env[61962]: DEBUG oslo_concurrency.lockutils [None req-286ae033-ae0a-4fe3-af73-4278631f960b tempest-ServerMetadataNegativeTestJSON-1558107376 tempest-ServerMetadataNegativeTestJSON-1558107376-project-member] Lock 
"647d141e-1aaf-4e5e-b0a3-31c9bee0adf7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.859238] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0d94c3c7-4f36-4ce6-a743-2e7b40d62c83 tempest-ServersAaction247Test-1090469211 tempest-ServersAaction247Test-1090469211-project-member] Acquiring lock "fb33811b-242a-4cce-ae11-c004e507e7de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.859584] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0d94c3c7-4f36-4ce6-a743-2e7b40d62c83 tempest-ServersAaction247Test-1090469211 tempest-ServersAaction247Test-1090469211-project-member] Lock "fb33811b-242a-4cce-ae11-c004e507e7de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.767807] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a0c5d3ad-fa00-4fd1-9a99-033a2c9ba886 tempest-ServerAddressesNegativeTestJSON-1340443941 tempest-ServerAddressesNegativeTestJSON-1340443941-project-member] Acquiring lock "c5eca2f9-f569-4510-882b-217c2c28ec69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.768211] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a0c5d3ad-fa00-4fd1-9a99-033a2c9ba886 tempest-ServerAddressesNegativeTestJSON-1340443941 tempest-ServerAddressesNegativeTestJSON-1340443941-project-member] Lock "c5eca2f9-f569-4510-882b-217c2c28ec69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.011023] env[61962]: DEBUG oslo_concurrency.lockutils [None req-437f1d6e-ae3f-4ed0-b52a-453235395d7b tempest-ServersTestMultiNic-318652074 tempest-ServersTestMultiNic-318652074-project-member] Acquiring lock "684d7213-d30a-4881-8e13-9817d6765074" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.011023] env[61962]: DEBUG oslo_concurrency.lockutils [None req-437f1d6e-ae3f-4ed0-b52a-453235395d7b tempest-ServersTestMultiNic-318652074 tempest-ServersTestMultiNic-318652074-project-member] Lock "684d7213-d30a-4881-8e13-9817d6765074" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.938278] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74d29d12-cd52-44c6-8ebf-65c67fcd6be5 tempest-AttachVolumeTestJSON-1002857752 tempest-AttachVolumeTestJSON-1002857752-project-member] Acquiring lock "9cf159b3-53c8-4002-9e9b-5c0f82e1e555" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.938605] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74d29d12-cd52-44c6-8ebf-65c67fcd6be5 tempest-AttachVolumeTestJSON-1002857752 tempest-AttachVolumeTestJSON-1002857752-project-member] Lock "9cf159b3-53c8-4002-9e9b-5c0f82e1e555" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.690378] env[61962]: DEBUG oslo_concurrency.lockutils [None req-26c84f68-530e-4672-82ee-12b17ec1b4aa tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Acquiring lock "306d093c-40fa-4863-b725-b3fc65042dce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.690934] env[61962]: DEBUG oslo_concurrency.lockutils [None req-26c84f68-530e-4672-82ee-12b17ec1b4aa tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Lock "306d093c-40fa-4863-b725-b3fc65042dce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.005s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.695757] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36bc805c-1195-4d2a-9935-feb61c69e14d tempest-InstanceActionsTestJSON-347250734 tempest-InstanceActionsTestJSON-347250734-project-member] Acquiring lock "f53ed31d-4cca-418d-b708-4797dbc5920f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.695972] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36bc805c-1195-4d2a-9935-feb61c69e14d tempest-InstanceActionsTestJSON-347250734 tempest-InstanceActionsTestJSON-347250734-project-member] Lock "f53ed31d-4cca-418d-b708-4797dbc5920f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.526720] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquiring lock "8ecd73a7-7f23-4994-85b9-479584831687" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.527139] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Lock "8ecd73a7-7f23-4994-85b9-479584831687" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.557487] env[61962]: WARNING oslo_vmware.rw_handles [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 
tempest-TenantUsagesTestJSON-1044362109-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 841.557487] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 841.557487] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 841.557487] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 841.557487] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 841.557487] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 841.557487] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 841.557487] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 841.557487] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 841.557487] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 841.557487] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 841.557487] env[61962]: ERROR oslo_vmware.rw_handles [ 841.558333] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/cf4d681e-fdf2-4685-b627-fc5b543c2292/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 841.560737] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 841.561136] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Copying Virtual Disk [datastore2] vmware_temp/cf4d681e-fdf2-4685-b627-fc5b543c2292/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/cf4d681e-fdf2-4685-b627-fc5b543c2292/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 841.561539] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83ca627d-e369-45e1-8934-9c263591e139 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.571764] env[61962]: DEBUG oslo_vmware.api [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Waiting for the task: (returnval){ [ 841.571764] env[61962]: value = "task-4891968" [ 841.571764] env[61962]: _type = "Task" [ 841.571764] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.579985] env[61962]: DEBUG oslo_vmware.api [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Task: {'id': task-4891968, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.082692] env[61962]: DEBUG oslo_vmware.exceptions [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 842.082999] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.083573] env[61962]: ERROR nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 842.083573] env[61962]: Faults: ['InvalidArgument'] [ 842.083573] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Traceback (most recent call last): [ 842.083573] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 842.083573] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] yield resources [ 842.083573] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 842.083573] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] self.driver.spawn(context, instance, image_meta, [ 842.083573] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 842.083573] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 842.083573] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 842.083573] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] self._fetch_image_if_missing(context, vi) [ 842.083573] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] image_cache(vi, tmp_image_ds_loc) [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] vm_util.copy_virtual_disk( [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] session._wait_for_task(vmdk_copy_task) [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] return self.wait_for_task(task_ref) [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] return evt.wait() [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] result = hub.switch() [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 842.083961] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] return self.greenlet.switch() [ 842.084416] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 842.084416] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] self.f(*self.args, **self.kw) [ 842.084416] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 842.084416] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] raise exceptions.translate_fault(task_info.error) [ 842.084416] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 842.084416] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Faults: ['InvalidArgument'] [ 842.084416] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] [ 842.084416] env[61962]: INFO nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Terminating instance [ 842.085482] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" 
{{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.085775] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.086690] env[61962]: DEBUG nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 842.086690] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 842.086848] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ce9dcbb-3f41-4ea0-8e5f-bbefb63d612e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.089406] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32a583c-eb6a-4cfd-8d43-471dc50fc184 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.097387] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 842.097629] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f754e4ae-c497-473c-8c6e-c64b7746d636 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.100260] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.100437] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 842.101525] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee8b409b-06e8-44c5-9c7f-1f991db3bf03 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.108489] env[61962]: DEBUG oslo_vmware.api [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 842.108489] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52805add-ac48-61c9-c796-249bf308ebc9" [ 842.108489] env[61962]: _type = "Task" [ 842.108489] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.118351] env[61962]: DEBUG oslo_vmware.api [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52805add-ac48-61c9-c796-249bf308ebc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.172025] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 842.172025] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 842.172025] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Deleting the datastore file [datastore2] 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 842.172025] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56c62bc4-8ed2-4b42-9b24-6a7a82d0c993 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.178695] env[61962]: DEBUG oslo_vmware.api [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Waiting for the task: (returnval){ [ 842.178695] env[61962]: value = "task-4891970" [ 842.178695] env[61962]: _type = "Task" [ 842.178695] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.186927] env[61962]: DEBUG oslo_vmware.api [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Task: {'id': task-4891970, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.621790] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 842.622162] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating directory with path [datastore2] vmware_temp/0ac47960-00b8-47af-b37f-3431a98ae349/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.622297] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2fc1a42-c677-4e56-a5c6-11a6bde52980 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.635178] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Created directory with path [datastore2] vmware_temp/0ac47960-00b8-47af-b37f-3431a98ae349/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.635358] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Fetch image to [datastore2] vmware_temp/0ac47960-00b8-47af-b37f-3431a98ae349/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 842.635595] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/0ac47960-00b8-47af-b37f-3431a98ae349/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 842.636586] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22a5d36-6eb6-4099-82d5-786015d5b7ea {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.643908] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdc08b1-e2aa-4973-98a5-6ab60c759413 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.653455] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0341c4ef-ba6f-49ef-96fd-cf894eed6016 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.687667] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679dd308-a44f-42f2-a6ff-9e5b84639dec {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.695567] env[61962]: DEBUG oslo_vmware.api [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Task: {'id': task-4891970, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108992} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.697191] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 842.697383] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 842.697557] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 842.697731] env[61962]: INFO nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Took 0.61 seconds to destroy the instance on the hypervisor. 
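[annotation] The CopyVirtualDisk_Task, SearchDatastore_Task and DeleteDatastoreFile_Task entries above all go through the same wait_for_task/_poll_task loop: the vCenter task is polled, a "progress is N%" DEBUG line is emitted on each pass, and on failure the task's error is translated into a fault exception (here InvalidArgument on fileType). A minimal sketch of that polling pattern follows; the names and the poll_fn contract are illustrative assumptions, not the actual oslo.vmware signatures.

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(poll_fn, interval=0.5):
        """Poll a task until it leaves the 'running' state.

        poll_fn() is assumed to return a dict such as
        {'state': 'running'|'success'|'error', 'progress': 0, 'error': None}.
        """
        while True:
            info = poll_fn()
            if info['state'] == 'running':
                # mirrors the "Task: {...} progress is N%." DEBUG lines above
                print(f"Task progress is {info['progress']}%.")
                time.sleep(interval)
            elif info['state'] == 'success':
                return info
            else:
                # mirrors "raise exceptions.translate_fault(task_info.error)"
                raise VimFaultException(['InvalidArgument'], info['error'])

A poll_fn that reports 'error' on its first pass reproduces the InvalidArgument flow shown in the traceback above, where the exception propagates out of wait_for_task into _cache_sparse_image.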
[ 842.699702] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3ed58b61-3d63-49a0-b605-45a020caabcb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.701687] env[61962]: DEBUG nova.compute.claims [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 842.701863] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.702088] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.726074] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 842.782909] env[61962]: DEBUG oslo_vmware.rw_handles [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ac47960-00b8-47af-b37f-3431a98ae349/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 842.843247] env[61962]: DEBUG oslo_vmware.rw_handles [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 842.844143] env[61962]: DEBUG oslo_vmware.rw_handles [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ac47960-00b8-47af-b37f-3431a98ae349/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 842.885073] env[61962]: DEBUG nova.scheduler.client.report [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Refreshing inventories for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 842.902664] env[61962]: DEBUG nova.scheduler.client.report [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Updating ProviderTree inventory for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 842.903031] env[61962]: DEBUG nova.compute.provider_tree [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Updating inventory in ProviderTree for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 842.915316] env[61962]: DEBUG nova.scheduler.client.report [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Refreshing aggregate associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, aggregates: None {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 842.939399] env[61962]: DEBUG nova.scheduler.client.report [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Refreshing trait associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 843.323076] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c792458-7689-44e1-97d0-57bc8333dbfe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.332856] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fce9c5-1e7d-4610-802b-d054579f8cb0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.363961] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-8a2a8760-5610-4519-90af-2ff012764614 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.372815] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01286c64-f2ce-4a5b-8b3d-4157d923acc4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.386507] env[61962]: DEBUG nova.compute.provider_tree [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.395348] env[61962]: DEBUG nova.scheduler.client.report [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 843.416448] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.714s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.417081] env[61962]: ERROR nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 843.417081] env[61962]: Faults: ['InvalidArgument'] [ 843.417081] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Traceback (most recent call last): [ 843.417081] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 843.417081] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] self.driver.spawn(context, instance, image_meta, [ 843.417081] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 843.417081] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 843.417081] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 843.417081] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] self._fetch_image_if_missing(context, 
vi) [ 843.417081] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 843.417081] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] image_cache(vi, tmp_image_ds_loc) [ 843.417081] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] vm_util.copy_virtual_disk( [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] session._wait_for_task(vmdk_copy_task) [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] return self.wait_for_task(task_ref) [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] return evt.wait() [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] result = hub.switch() [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] return self.greenlet.switch() [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 843.417586] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] self.f(*self.args, **self.kw) [ 843.418122] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 843.418122] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] raise exceptions.translate_fault(task_info.error) [ 843.418122] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 843.418122] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Faults: ['InvalidArgument'] [ 843.418122] env[61962]: ERROR nova.compute.manager [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] [ 843.418122] env[61962]: DEBUG nova.compute.utils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 
tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 843.419365] env[61962]: DEBUG nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Build of instance 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d was re-scheduled: A specified parameter was not correct: fileType [ 843.419365] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 843.419785] env[61962]: DEBUG nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 843.419942] env[61962]: DEBUG nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 843.420081] env[61962]: DEBUG nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 843.420243] env[61962]: DEBUG nova.network.neutron [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 843.873326] env[61962]: DEBUG nova.network.neutron [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.892021] env[61962]: INFO nova.compute.manager [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Took 0.47 seconds to deallocate network for instance. 
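[annotation] Every build attempt in this log is bracketed by the Acquiring lock / acquired / "released" triplets from oslo_concurrency.lockutils: the compute manager wraps a local function in a lock named after the instance UUID, so a rebuild, reschedule, or delete of the same instance serializes behind the in-flight build while other instances proceed in parallel. A compact sketch of that pattern, using plain threading primitives for illustration (Nova itself goes through lockutils rather than this hand-rolled registry):

    import collections
    import threading
    import time

    _locks = collections.defaultdict(threading.Lock)  # one lock per instance UUID

    def locked_do_build_and_run_instance(instance_uuid, build_fn):
        start = time.monotonic()
        print(f'Acquiring lock "{instance_uuid}"')
        with _locks[instance_uuid]:
            # "waited" is the time spent queued behind another holder of the same UUID
            print(f'Lock "{instance_uuid}" acquired :: waited {time.monotonic() - start:.3f}s')
            held_start = time.monotonic()
            try:
                return build_fn()
            finally:
                print(f'Lock "{instance_uuid}" "released" :: held {time.monotonic() - held_start:.3f}s')

The long "held" durations in the release lines that follow (200+ seconds) are the full build attempts measured by this wrapper, and the 92.614s "waited" on the terminate lock below is exactly a delete queued behind the failing build of the same instance.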
[ 844.004868] env[61962]: INFO nova.scheduler.client.report [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Deleted allocations for instance 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d [ 844.027784] env[61962]: DEBUG oslo_concurrency.lockutils [None req-eaddb9ef-13ca-4c01-8d47-47e1b7975f32 tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 292.716s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.028575] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 92.614s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.029093] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Acquiring lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.029093] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.029182] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.032323] env[61962]: INFO nova.compute.manager [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Terminating instance [ 844.034371] env[61962]: DEBUG nova.compute.manager [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Start destroying the instance on the hypervisor.
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 844.034563] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 844.034859] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc115bf2-b3bd-4bcf-b4dd-c047a15879f6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.044920] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9efeef7-c148-4ffd-973e-5cef109a43f9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.057265] env[61962]: DEBUG nova.compute.manager [None req-e175c8ae-7160-4ad7-b0bc-3fb054dd71be tempest-ServerActionsV293TestJSON-1621295222 tempest-ServerActionsV293TestJSON-1621295222-project-member] [instance: 2dad2cb0-56e2-4fb6-b540-0fd98265697f] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.082375] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d could not be found. [ 844.082596] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 844.082782] env[61962]: INFO nova.compute.manager [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 844.083048] env[61962]: DEBUG oslo.service.loopingcall [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.083338] env[61962]: DEBUG nova.compute.manager [-] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 844.083443] env[61962]: DEBUG nova.network.neutron [-] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 844.086019] env[61962]: DEBUG nova.compute.manager [None req-e175c8ae-7160-4ad7-b0bc-3fb054dd71be tempest-ServerActionsV293TestJSON-1621295222 tempest-ServerActionsV293TestJSON-1621295222-project-member] [instance: 2dad2cb0-56e2-4fb6-b540-0fd98265697f] Instance disappeared before build.
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 844.110296] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e175c8ae-7160-4ad7-b0bc-3fb054dd71be tempest-ServerActionsV293TestJSON-1621295222 tempest-ServerActionsV293TestJSON-1621295222-project-member] Lock "2dad2cb0-56e2-4fb6-b540-0fd98265697f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 236.660s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.112628] env[61962]: DEBUG nova.network.neutron [-] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.121724] env[61962]: DEBUG nova.compute.manager [None req-2efed426-f73a-4ebe-8065-0c95039aec6f tempest-ServersListShow296Test-974296751 tempest-ServersListShow296Test-974296751-project-member] [instance: 8436449e-8f8d-4fd0-96be-31f1464f03ae] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.127024] env[61962]: INFO nova.compute.manager [-] [instance: 1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d] Took 0.04 seconds to deallocate network for instance. [ 844.146340] env[61962]: DEBUG nova.compute.manager [None req-2efed426-f73a-4ebe-8065-0c95039aec6f tempest-ServersListShow296Test-974296751 tempest-ServersListShow296Test-974296751-project-member] [instance: 8436449e-8f8d-4fd0-96be-31f1464f03ae] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 844.174718] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2efed426-f73a-4ebe-8065-0c95039aec6f tempest-ServersListShow296Test-974296751 tempest-ServersListShow296Test-974296751-project-member] Lock "8436449e-8f8d-4fd0-96be-31f1464f03ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 236.407s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.187170] env[61962]: DEBUG nova.compute.manager [None req-0fa19bb3-4f67-4f1b-9bd9-2589e1a304ec tempest-InstanceActionsV221TestJSON-1432779233 tempest-InstanceActionsV221TestJSON-1432779233-project-member] [instance: 92f66377-ddec-4312-b10c-f7edc4855d48] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.244849] env[61962]: DEBUG nova.compute.manager [None req-0fa19bb3-4f67-4f1b-9bd9-2589e1a304ec tempest-InstanceActionsV221TestJSON-1432779233 tempest-InstanceActionsV221TestJSON-1432779233-project-member] [instance: 92f66377-ddec-4312-b10c-f7edc4855d48] Instance disappeared before build.
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 844.280205] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0fa19bb3-4f67-4f1b-9bd9-2589e1a304ec tempest-InstanceActionsV221TestJSON-1432779233 tempest-InstanceActionsV221TestJSON-1432779233-project-member] Lock "92f66377-ddec-4312-b10c-f7edc4855d48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 234.351s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.295050] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f0b41a02-3e76-49fe-ad00-90824eaa248d tempest-TenantUsagesTestJSON-1044362109 tempest-TenantUsagesTestJSON-1044362109-project-member] Lock "1a7d5a5f-c5b1-40d2-828b-090fb8e76e3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.265s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.295684] env[61962]: DEBUG nova.compute.manager [None req-780aa977-c838-472d-b3f8-e84f37c4403f tempest-ServerActionsTestOtherA-1411072425 tempest-ServerActionsTestOtherA-1411072425-project-member] [instance: 17bc2689-2250-48bc-bc0e-47821b20d3ba] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.324356] env[61962]: DEBUG nova.compute.manager [None req-780aa977-c838-472d-b3f8-e84f37c4403f tempest-ServerActionsTestOtherA-1411072425 tempest-ServerActionsTestOtherA-1411072425-project-member] [instance: 17bc2689-2250-48bc-bc0e-47821b20d3ba] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 844.351941] env[61962]: DEBUG oslo_concurrency.lockutils [None req-780aa977-c838-472d-b3f8-e84f37c4403f tempest-ServerActionsTestOtherA-1411072425 tempest-ServerActionsTestOtherA-1411072425-project-member] Lock "17bc2689-2250-48bc-bc0e-47821b20d3ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 231.191s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.362859] env[61962]: DEBUG nova.compute.manager [None req-b422153a-e67e-476e-bf23-70d15cc3aff1 tempest-ServersTestMultiNic-318652074 tempest-ServersTestMultiNic-318652074-project-member] [instance: 146b08c3-3893-4c0e-9448-05a988c19ae8] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.395940] env[61962]: DEBUG nova.compute.manager [None req-b422153a-e67e-476e-bf23-70d15cc3aff1 tempest-ServersTestMultiNic-318652074 tempest-ServersTestMultiNic-318652074-project-member] [instance: 146b08c3-3893-4c0e-9448-05a988c19ae8] Instance disappeared before build.
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 844.418743] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b422153a-e67e-476e-bf23-70d15cc3aff1 tempest-ServersTestMultiNic-318652074 tempest-ServersTestMultiNic-318652074-project-member] Lock "146b08c3-3893-4c0e-9448-05a988c19ae8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 227.901s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.443417] env[61962]: DEBUG nova.compute.manager [None req-0c7770ba-e86c-4883-8d4f-ff07f7134239 tempest-AttachVolumeTestJSON-1002857752 tempest-AttachVolumeTestJSON-1002857752-project-member] [instance: 2ef4b06a-c5c6-46ed-89d8-30143aa30a25] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.471778] env[61962]: DEBUG nova.compute.manager [None req-0c7770ba-e86c-4883-8d4f-ff07f7134239 tempest-AttachVolumeTestJSON-1002857752 tempest-AttachVolumeTestJSON-1002857752-project-member] [instance: 2ef4b06a-c5c6-46ed-89d8-30143aa30a25] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 844.501253] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0c7770ba-e86c-4883-8d4f-ff07f7134239 tempest-AttachVolumeTestJSON-1002857752 tempest-AttachVolumeTestJSON-1002857752-project-member] Lock "2ef4b06a-c5c6-46ed-89d8-30143aa30a25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 220.292s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.513391] env[61962]: DEBUG nova.compute.manager [None req-a06e4745-56c4-49f7-b260-41d5e755269d tempest-VolumesAdminNegativeTest-1349254896 tempest-VolumesAdminNegativeTest-1349254896-project-member] [instance: 2142b6e8-d060-48eb-8ff1-3e92e6188a83] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.539432] env[61962]: DEBUG nova.compute.manager [None req-a06e4745-56c4-49f7-b260-41d5e755269d tempest-VolumesAdminNegativeTest-1349254896 tempest-VolumesAdminNegativeTest-1349254896-project-member] [instance: 2142b6e8-d060-48eb-8ff1-3e92e6188a83] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 844.560548] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a06e4745-56c4-49f7-b260-41d5e755269d tempest-VolumesAdminNegativeTest-1349254896 tempest-VolumesAdminNegativeTest-1349254896-project-member] Lock "2142b6e8-d060-48eb-8ff1-3e92e6188a83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 219.532s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.570151] env[61962]: DEBUG nova.compute.manager [None req-66ccc47b-35fa-4b88-a452-e1d0ad54e239 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] [instance: dbe5937a-cf98-4abc-ad5a-521762e0be9d] Starting instance...
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.596344] env[61962]: DEBUG nova.compute.manager [None req-66ccc47b-35fa-4b88-a452-e1d0ad54e239 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] [instance: dbe5937a-cf98-4abc-ad5a-521762e0be9d] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 844.619486] env[61962]: DEBUG oslo_concurrency.lockutils [None req-66ccc47b-35fa-4b88-a452-e1d0ad54e239 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Lock "dbe5937a-cf98-4abc-ad5a-521762e0be9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.850s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.628454] env[61962]: DEBUG nova.compute.manager [None req-bb9b0d23-3bd4-41a5-986c-f09e040cfac8 tempest-AttachInterfacesV270Test-634782997 tempest-AttachInterfacesV270Test-634782997-project-member] [instance: b387b903-2f5c-40ad-aeea-b013fb66e17d] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.652143] env[61962]: DEBUG nova.compute.manager [None req-bb9b0d23-3bd4-41a5-986c-f09e040cfac8 tempest-AttachInterfacesV270Test-634782997 tempest-AttachInterfacesV270Test-634782997-project-member] [instance: b387b903-2f5c-40ad-aeea-b013fb66e17d] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 844.673936] env[61962]: DEBUG oslo_concurrency.lockutils [None req-bb9b0d23-3bd4-41a5-986c-f09e040cfac8 tempest-AttachInterfacesV270Test-634782997 tempest-AttachInterfacesV270Test-634782997-project-member] Lock "b387b903-2f5c-40ad-aeea-b013fb66e17d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.414s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.689860] env[61962]: DEBUG nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Starting instance...
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.767963] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.768474] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.770334] env[61962]: INFO nova.compute.claims [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 845.208940] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196f77ed-c075-4aca-8434-b97d8d04326d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.218317] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851429e0-cd79-4df8-8dcb-b5da7e6f5686 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.251957] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a38e1e-055d-4155-a1e4-053918e9de23 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.260987] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ede610-df15-4c6e-b637-0409ccea0afc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.275382] env[61962]: DEBUG nova.compute.provider_tree [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.285368] env[61962]: DEBUG nova.scheduler.client.report [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 845.300023] env[61962]: DEBUG oslo_concurrency.lockutils 
[None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.532s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.300675] env[61962]: DEBUG nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 845.343173] env[61962]: DEBUG nova.compute.utils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.344939] env[61962]: DEBUG nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 845.345195] env[61962]: DEBUG nova.network.neutron [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 845.355740] env[61962]: DEBUG nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 845.457578] env[61962]: DEBUG nova.policy [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f13df47a10ac456b9e27646e67a49154', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '389fb8d4c2ce47829fe23a6ca60e9bdd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 845.477172] env[61962]: DEBUG nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Start spawning the instance on the hypervisor. 
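[editor's note] The interleaving just above ("Allocating IP information in the background." followed immediately by block-device-mapping work) reflects network allocation being started asynchronously while the build continues. A rough sketch of that overlap; Nova itself uses greenthreads and its own helpers, so the executor and all function names below are stand-ins, not Nova's API:

```python
from concurrent.futures import ThreadPoolExecutor

def allocate_for_instance(instance_uuid, requested_ports):
    # Stand-in for the Neutron port-creation round trips logged above.
    return [{"id": port, "active": False} for port in requested_ports]

def build_block_device_mappings(instance_uuid):
    # Stand-in for BDM preparation, which proceeds while allocation runs.
    return [{"device_name": "/dev/sda", "boot_index": 0}]

def build_resources(instance_uuid, requested_ports):
    with ThreadPoolExecutor(max_workers=1) as pool:
        nw_future = pool.submit(allocate_for_instance,
                                instance_uuid, requested_ports)
        bdms = build_block_device_mappings(instance_uuid)
        network_info = nw_future.result()  # spawn blocks only when it needs this
    return network_info, bdms

print(build_resources("bf82c44c-95ce-4e4c-a32f-444bba1ef5cd",
                      ["331f0490-3834-4c36-8d6a-c0b03d18148c"]))
```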
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 845.510184] env[61962]: DEBUG nova.virt.hardware [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 845.510452] env[61962]: DEBUG nova.virt.hardware [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 845.510627] env[61962]: DEBUG nova.virt.hardware [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.510816] env[61962]: DEBUG nova.virt.hardware [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 845.510962] env[61962]: DEBUG nova.virt.hardware [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.511140] env[61962]: DEBUG nova.virt.hardware [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 845.511349] env[61962]: DEBUG nova.virt.hardware [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 845.511524] env[61962]: DEBUG nova.virt.hardware [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 845.511696] 
env[61962]: DEBUG nova.virt.hardware [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 845.511869] env[61962]: DEBUG nova.virt.hardware [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 845.512105] env[61962]: DEBUG nova.virt.hardware [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 845.512966] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5003488-bda1-4206-b446-2a251d466168 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.525226] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a478caf9-84b6-4bef-81f0-a28c641f7c57 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.227354] env[61962]: DEBUG nova.network.neutron [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Successfully created port: 331f0490-3834-4c36-8d6a-c0b03d18148c {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 847.152342] env[61962]: DEBUG nova.compute.manager [req-c9828947-e1dc-46bd-8ba0-372ffe64b76a req-ca3ddf89-18d9-4754-aeb7-463445bc0fbf service nova] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Received event network-vif-plugged-331f0490-3834-4c36-8d6a-c0b03d18148c {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 847.152556] env[61962]: DEBUG oslo_concurrency.lockutils [req-c9828947-e1dc-46bd-8ba0-372ffe64b76a req-ca3ddf89-18d9-4754-aeb7-463445bc0fbf service nova] Acquiring lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.152802] env[61962]: DEBUG oslo_concurrency.lockutils [req-c9828947-e1dc-46bd-8ba0-372ffe64b76a req-ca3ddf89-18d9-4754-aeb7-463445bc0fbf service nova] Lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.152921] env[61962]: DEBUG oslo_concurrency.lockutils [req-c9828947-e1dc-46bd-8ba0-372ffe64b76a req-ca3ddf89-18d9-4754-aeb7-463445bc0fbf service nova] Lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.153102]
env[61962]: DEBUG nova.compute.manager [req-c9828947-e1dc-46bd-8ba0-372ffe64b76a req-ca3ddf89-18d9-4754-aeb7-463445bc0fbf service nova] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] No waiting events found dispatching network-vif-plugged-331f0490-3834-4c36-8d6a-c0b03d18148c {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 847.153276] env[61962]: WARNING nova.compute.manager [req-c9828947-e1dc-46bd-8ba0-372ffe64b76a req-ca3ddf89-18d9-4754-aeb7-463445bc0fbf service nova] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Received unexpected event network-vif-plugged-331f0490-3834-4c36-8d6a-c0b03d18148c for instance with vm_state building and task_state spawning. [ 847.282066] env[61962]: DEBUG nova.network.neutron [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Successfully updated port: 331f0490-3834-4c36-8d6a-c0b03d18148c {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 847.298126] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Acquiring lock "refresh_cache-bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.301752] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Acquired lock "refresh_cache-bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.302056] env[61962]: DEBUG nova.network.neutron [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 847.395769] env[61962]: DEBUG nova.network.neutron [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Instance cache missing network info. 
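[editor's note] The nova.virt.hardware entries a little above walk from "Build topologies for 1 vcpu(s) 1:1:1" to a single sorted candidate. A simplified re-implementation of that enumerate-then-sort idea; this is not Nova's exact algorithm, but the 65536 limits and the 0-means-unset convention are taken from the log:

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate (sockets, cores, threads) triples that exactly cover vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append((sockets, cores, threads))
    return found

def sort_desired(topologies, preferred=(0, 0, 0)):
    # 0 means "no preference" (the flavor/image prefs 0:0:0 above), so with
    # no preference every topology scores the same and the order is kept.
    def mismatches(topo):
        return sum(1 for want, got in zip(preferred, topo)
                   if want not in (0, got))
    return sorted(topologies, key=mismatches)

print(sort_desired(possible_topologies(1)))  # -> [(1, 1, 1)], as logged
```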
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.991368] env[61962]: DEBUG nova.network.neutron [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Updating instance_info_cache with network_info: [{"id": "331f0490-3834-4c36-8d6a-c0b03d18148c", "address": "fa:16:3e:d3:82:5b", "network": {"id": "675dfe37-c6f4-4ca2-9412-5fd9e5d66a99", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1130110711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "389fb8d4c2ce47829fe23a6ca60e9bdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap331f0490-38", "ovs_interfaceid": "331f0490-3834-4c36-8d6a-c0b03d18148c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.008261] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Releasing lock "refresh_cache-bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.008569] env[61962]: DEBUG nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Instance network_info: |[{"id": "331f0490-3834-4c36-8d6a-c0b03d18148c", "address": "fa:16:3e:d3:82:5b", "network": {"id": "675dfe37-c6f4-4ca2-9412-5fd9e5d66a99", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1130110711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "389fb8d4c2ce47829fe23a6ca60e9bdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap331f0490-38", "ovs_interfaceid": "331f0490-3834-4c36-8d6a-c0b03d18148c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 848.008999] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:82:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b931c4c-f73c-4fbd-9c9f-0270834cc69e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '331f0490-3834-4c36-8d6a-c0b03d18148c', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 848.018028] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Creating folder: Project (389fb8d4c2ce47829fe23a6ca60e9bdd). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 848.018028] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b02fdc7-33aa-46a3-8566-6e825f213f2d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.030879] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Created folder: Project (389fb8d4c2ce47829fe23a6ca60e9bdd) in parent group-v953327. [ 848.031107] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Creating folder: Instances. Parent ref: group-v953375. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 848.031364] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7ac4b86-a057-4b9b-b258-543964e38d19 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.042446] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Created folder: Instances in parent group-v953375. [ 848.042742] env[61962]: DEBUG oslo.service.loopingcall [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
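[editor's note] The two Folder.CreateFolder calls above build a per-project folder under group-v953327 and an Instances folder beneath it. A toy model of that create-or-reuse shape; FakeVC is invented for illustration (the real calls go through the oslo.vmware session), and treating a duplicate name as "already exists" is an assumption about the driver's idempotency, by analogy with the "Extension org.openstack.compute already exists" handling earlier in this log:

```python
import itertools

class FakeVC:
    """Invented stand-in for a vCenter folder tree (illustration only)."""
    _ids = itertools.count(953375)

    def __init__(self):
        self.children = {"group-v953327": {}}  # folder ref -> {name: ref}

    def create_folder(self, parent_ref, name):
        kids = self.children.setdefault(parent_ref, {})
        if name in kids:          # assumed: a DuplicateName fault is caught
            return kids[name]     # and the existing folder ref is reused
        ref = "group-v%d" % next(self._ids)
        kids[name] = ref
        self.children[ref] = {}
        return ref

vc = FakeVC()
project_ref = vc.create_folder("group-v953327",
                               "Project (389fb8d4c2ce47829fe23a6ca60e9bdd)")
print(project_ref)                                 # -> group-v953375
print(vc.create_folder(project_ref, "Instances"))  # -> group-v953376
```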
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.042961] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 848.043177] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3e9ea4c-b64d-41bf-a38e-730d7478e9bf {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.068022] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 848.068022] env[61962]: value = "task-4891973" [ 848.068022] env[61962]: _type = "Task" [ 848.068022] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.078259] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891973, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.579255] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891973, 'name': CreateVM_Task, 'duration_secs': 0.327701} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.579601] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 848.580204] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.580423] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.580789] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 848.581055] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e891235b-b352-438c-b5f2-5213b9cedcac {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.586550] env[61962]: DEBUG oslo_vmware.api [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Waiting for the task: (returnval){ [ 848.586550] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5281ce0f-10ac-0817-4cd1-25bb4b0afc70" [ 848.586550] env[61962]: _type = "Task" [ 848.586550] env[61962]: } to complete. 
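[editor's note] The "Waiting for the task" / "progress is 0%" / "completed successfully" triple above is a poll loop around a vCenter task handle. A condensed sketch of that pattern with an invented task object; the real loop, including backoff and fault translation, lives in oslo.vmware's api.py:

```python
import time

class FakeTask:
    """Invented task that finishes after a few polls."""
    def __init__(self, name):
        self.name, self._polls = name, 0

    def info(self):
        self._polls += 1
        state = "success" if self._polls >= 3 else "running"
        return {"name": self.name, "state": state,
                "progress": min(100, self._polls * 40)}

def wait_for_task(task, interval=0.5):
    """Poll until the task reaches a terminal state, logging progress."""
    while True:
        info = task.info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("task %s failed" % info["name"])
        print("Task %s progress is %d%%." % (info["name"], info["progress"]))
        time.sleep(interval)

print(wait_for_task(FakeTask("CreateVM_Task")))
```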
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.595305] env[61962]: DEBUG oslo_vmware.api [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5281ce0f-10ac-0817-4cd1-25bb4b0afc70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.099864] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.100618] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.100907] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.196367] env[61962]: DEBUG nova.compute.manager [req-ec2ccd35-7920-4d44-a368-4708e2afd3ec req-23100661-c2c5-4563-94a9-7dcf55031fbb service nova] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Received event network-changed-331f0490-3834-4c36-8d6a-c0b03d18148c {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 849.196570] env[61962]: DEBUG nova.compute.manager [req-ec2ccd35-7920-4d44-a368-4708e2afd3ec req-23100661-c2c5-4563-94a9-7dcf55031fbb service nova] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Refreshing instance network info cache due to event network-changed-331f0490-3834-4c36-8d6a-c0b03d18148c. 
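[editor's note] The network-changed event above makes the compute manager re-fetch port details and rewrite the matching VIF entry, serialized by the instance's refresh_cache lock. Schematically (the module-level lock and the fetch helper below are stand-ins, not Nova's API):

```python
import threading

_cache = {}                      # instance uuid -> list of VIF dicts
_cache_lock = threading.Lock()   # stands in for the "refresh_cache-<uuid>" lock

def fetch_port_info(port_id):
    # Stand-in for the Neutron round trip behind _get_instance_nw_info().
    return {"id": port_id, "active": True}

def handle_network_changed(instance_uuid, port_id):
    # Serialize with the build path so stale VIF data is never cached.
    with _cache_lock:
        vifs = _cache.setdefault(instance_uuid, [])
        fresh = fetch_port_info(port_id)
        # Update the matching VIF entry in place, mirroring
        # "Updated VIF entry in instance network info cache for port ...".
        for i, vif in enumerate(vifs):
            if vif["id"] == port_id:
                vifs[i] = fresh
                break
        else:
            vifs.append(fresh)

handle_network_changed("bf82c44c-95ce-4e4c-a32f-444bba1ef5cd",
                       "331f0490-3834-4c36-8d6a-c0b03d18148c")
print(_cache)
```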
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 849.196776] env[61962]: DEBUG oslo_concurrency.lockutils [req-ec2ccd35-7920-4d44-a368-4708e2afd3ec req-23100661-c2c5-4563-94a9-7dcf55031fbb service nova] Acquiring lock "refresh_cache-bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.196915] env[61962]: DEBUG oslo_concurrency.lockutils [req-ec2ccd35-7920-4d44-a368-4708e2afd3ec req-23100661-c2c5-4563-94a9-7dcf55031fbb service nova] Acquired lock "refresh_cache-bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.197151] env[61962]: DEBUG nova.network.neutron [req-ec2ccd35-7920-4d44-a368-4708e2afd3ec req-23100661-c2c5-4563-94a9-7dcf55031fbb service nova] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Refreshing network info cache for port 331f0490-3834-4c36-8d6a-c0b03d18148c {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 849.652709] env[61962]: DEBUG nova.network.neutron [req-ec2ccd35-7920-4d44-a368-4708e2afd3ec req-23100661-c2c5-4563-94a9-7dcf55031fbb service nova] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Updated VIF entry in instance network info cache for port 331f0490-3834-4c36-8d6a-c0b03d18148c. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 849.653083] env[61962]: DEBUG nova.network.neutron [req-ec2ccd35-7920-4d44-a368-4708e2afd3ec req-23100661-c2c5-4563-94a9-7dcf55031fbb service nova] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Updating instance_info_cache with network_info: [{"id": "331f0490-3834-4c36-8d6a-c0b03d18148c", "address": "fa:16:3e:d3:82:5b", "network": {"id": "675dfe37-c6f4-4ca2-9412-5fd9e5d66a99", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1130110711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "389fb8d4c2ce47829fe23a6ca60e9bdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap331f0490-38", "ovs_interfaceid": "331f0490-3834-4c36-8d6a-c0b03d18148c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.662981] env[61962]: DEBUG oslo_concurrency.lockutils [req-ec2ccd35-7920-4d44-a368-4708e2afd3ec req-23100661-c2c5-4563-94a9-7dcf55031fbb service nova] Releasing lock "refresh_cache-bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.327543] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] 
Acquiring lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.153931] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.158579] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.158863] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 860.158969] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.171177] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.171177] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.171364] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.171448] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 860.172641] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb3a6f8-a1cd-4721-9647-fb9ab563e9c0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.182679] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386f39cc-a20c-4602-9e04-0eb35f3c67aa {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.197412] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5d25aa-84c8-4e58-a726-07343ba97482 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.204531] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0a3ce8-10f8-43c0-9856-b04349d34f63 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.234820] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180567MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 860.234988] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.235191] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.312929] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d0df932e-822b-4c60-a588-445aa2f14cdb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.313149] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a6e6f30d-c667-4f70-bf60-596a8fe97188 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.313321] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b9782756-27e1-46fc-940f-069cfb9ab9f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.313482] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 667f56c0-896f-4e25-9e8a-3954f7733dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.313638] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 295064f0-9677-4287-877a-ef2a33a87fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
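[editor's note] Each active claim logged here (the list continues below) is 1 VCPU / 128 MB / 1 GB, and the tracker's "Final resource view" further down sums ten of them plus the 512 MB reservation from the inventory data. As quick arithmetic, assuming all ten allocations are this same m1.nano-sized claim, which the identical resource dicts suggest:

```python
# Ten active claims, each {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}.
RESERVED_MB = 512  # reserved host memory from the inventory data above
claims = [{"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}] * 10

used_vcpus = sum(c["VCPU"] for c in claims)
used_ram = RESERVED_MB + sum(c["MEMORY_MB"] for c in claims)
used_disk = sum(c["DISK_GB"] for c in claims)

# Matches the later summary: "used_ram=1792MB ... used_disk=10GB ... used_vcpus=10"
print(used_vcpus, used_ram, used_disk)  # -> 10 1792 10
```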
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.313831] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6996289c-f881-4733-8943-98e3633e43ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.313952] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.314123] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.314342] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.314503] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 860.327880] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1dcafe3b-989b-4290-be54-ceccc62a323f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.341166] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3bbd6215-436e-4d30-9c07-b3f69002f3bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.351869] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.363458] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.373936] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c17b6733-edea-4db9-b937-c38585eb92bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.386314] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1228fef1-3837-4a1b-8c88-fc1e27af6b4f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.398520] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 66eb1344-b692-4f1c-a5ac-b3518e350741 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.409562] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b33ecac7-cb2f-438c-9ee4-7cd53388e318 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.422446] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 252f4add-476d-46bc-b369-c8d66a512142 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.447712] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 647d141e-1aaf-4e5e-b0a3-31c9bee0adf7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
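[editor's note] The tracker's walk above makes a per-allocation decision: keep allocations for instances actively managed on this host, skip "healing" for instances that are scheduled here but have yet to start, and treat anything unrecognized as a candidate for cleanup. A compact sketch of that triage, simplified from what the log shows (the real checks also consider deleted instances and migrations):

```python
def triage_allocations(allocations, tracked, scheduled):
    """Decide what to do with each placement allocation on this node."""
    for uuid, resources in allocations.items():
        if uuid in tracked:
            print("Instance %s actively managed here; keep %s" % (uuid, resources))
        elif uuid in scheduled:
            print("Instance %s scheduled but not started; skip heal" % uuid)
        else:
            print("Instance %s unknown; candidate for allocation removal" % uuid)

triage_allocations(
    {"bf82c44c": {"VCPU": 1}, "1dcafe3b": {"VCPU": 1}, "dead-beef": {"VCPU": 1}},
    tracked={"bf82c44c"},
    scheduled={"1dcafe3b"},
)
```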
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.459977] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance fb33811b-242a-4cce-ae11-c004e507e7de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.472067] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c5eca2f9-f569-4510-882b-217c2c28ec69 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.485351] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 684d7213-d30a-4881-8e13-9817d6765074 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.496805] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 9cf159b3-53c8-4002-9e9b-5c0f82e1e555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.507504] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 306d093c-40fa-4863-b725-b3fc65042dce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.518885] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance f53ed31d-4cca-418d-b708-4797dbc5920f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.529377] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8ecd73a7-7f23-4994-85b9-479584831687 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 860.529628] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 860.529788] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '23', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'io_workload': '10', 'num_proj_6e7e747becdf4976b570b79d4b20d88b': '1', 'num_proj_a7b748ae1e6d4e17bc463811008c5596': '1', 'num_proj_6c00b1a76e464ad2a9788673e256de22': '1', 'num_proj_78e44aa9cd5642928627210447b3e177': '1', 'num_proj_b4cf0a2155644882b4bf42e69e210138': '1', 'num_proj_ed943c69c73d48e2b1b2bab311295d84': '2', 'num_proj_5481169540de4205a1ff7731132eb3a7': '1', 'num_proj_389fb8d4c2ce47829fe23a6ca60e9bdd': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 860.872111] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161a507e-e3f2-48a4-89dd-a0bd181c671d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.880028] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd7f2ec-014f-49aa-bb3e-072d6044cb91 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.916814] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09cbf498-7af1-4a2f-95a6-ee3b35512167 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.924683] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8377684f-4bc1-47c8-bbdc-7ae4c66f1a6f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.938520] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.949503] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 860.965130] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 860.965765] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.730s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.960261] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.158600] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.158860] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.158476] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.158824] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 865.158824] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 865.182202] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 865.182532] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 865.182610] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 865.182738] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 865.182861] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 865.182984] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 865.183119] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 865.183242] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 865.183361] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 865.183530] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 865.183592] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
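[editor's note] "Rebuilding the list of instances to heal", skipping every Building instance, and finishing with "Didn't find any instances for network info cache update." is, in effect, a filter over the host's instance list. Sketched below with dict-shaped instances and vm_state strings abbreviated from the log:

```python
def instances_to_heal(instances):
    """Keep only instances whose info cache is worth refreshing."""
    to_heal = []
    for inst in instances:
        if inst["vm_state"] == "building":
            print("[instance: %s] Skipping network cache update "
                  "for instance because it is Building." % inst["uuid"])
            continue
        to_heal.append(inst)
    if not to_heal:
        print("Didn't find any instances for network info cache update.")
    return to_heal

instances_to_heal([{"uuid": "bf82c44c", "vm_state": "building"},
                   {"uuid": "d0df932e", "vm_state": "building"}])
```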
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 866.158187] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.158353] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.158498] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 888.263705] env[61962]: WARNING oslo_vmware.rw_handles [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 888.263705] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 888.263705] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 888.263705] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 888.263705] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 888.263705] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 888.263705] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 888.263705] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 888.263705] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 888.263705] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 888.263705] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 888.263705] env[61962]: ERROR oslo_vmware.rw_handles [ 888.264488] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/0ac47960-00b8-47af-b37f-3431a98ae349/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 888.266297] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 888.266655] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/0ac47960-00b8-47af-b37f-3431a98ae349/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/0ac47960-00b8-47af-b37f-3431a98ae349/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 888.266983] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35449bdf-004d-4e2c-801b-4d9bf5abf613 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.275227] env[61962]: DEBUG oslo_vmware.api [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 888.275227] env[61962]: value = "task-4891974" [ 888.275227] env[61962]: _type = "Task" [ 888.275227] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.283831] env[61962]: DEBUG oslo_vmware.api [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4891974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.787518] env[61962]: DEBUG oslo_vmware.exceptions [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 888.788629] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.788629] env[61962]: ERROR nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 888.788629] env[61962]: Faults: ['InvalidArgument'] [ 888.788629] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Traceback (most recent call last): [ 888.788629] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 888.788629] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] yield resources [ 888.788629] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 888.788629] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] self.driver.spawn(context, instance, image_meta, [ 888.788629] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 888.788629] env[61962]: ERROR 
nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] self._fetch_image_if_missing(context, vi) [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] image_cache(vi, tmp_image_ds_loc) [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] vm_util.copy_virtual_disk( [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] session._wait_for_task(vmdk_copy_task) [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] return self.wait_for_task(task_ref) [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] return evt.wait() [ 888.789101] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 888.789604] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] result = hub.switch() [ 888.789604] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 888.789604] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] return self.greenlet.switch() [ 888.789604] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 888.789604] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] self.f(*self.args, **self.kw) [ 888.789604] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 888.789604] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] raise exceptions.translate_fault(task_info.error) [ 888.789604] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 888.789604] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Faults: ['InvalidArgument'] [ 888.789604] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] [ 888.789604] env[61962]: INFO nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Terminating instance [ 888.791045] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.791270] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 888.791518] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62eb9896-fbf8-4134-b2da-9aeda41b4329 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.794470] env[61962]: DEBUG nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 888.794569] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 888.795336] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ccb417-4e35-4580-a0ec-bcb122b4d132 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.803120] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 888.803420] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab1505b4-cbd6-4b2e-8d7a-ae4c835e2ac5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.806209] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 888.806425] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 888.807526] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-773b04fd-48a3-4391-bec9-702e135c1a87 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.813359] env[61962]: DEBUG oslo_vmware.api [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Waiting for the task: (returnval){ [ 888.813359] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]521a53bd-9c97-b520-d12a-7431a5b3d66b" [ 888.813359] env[61962]: _type = "Task" [ 888.813359] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.829780] env[61962]: DEBUG oslo_vmware.api [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]521a53bd-9c97-b520-d12a-7431a5b3d66b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.891488] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 888.891488] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 888.891488] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleting the datastore file [datastore2] d0df932e-822b-4c60-a588-445aa2f14cdb {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.891488] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d69e1eb0-c110-4a0b-ae2f-7e265860f2f4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.899513] env[61962]: DEBUG oslo_vmware.api [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 888.899513] env[61962]: value = "task-4891976" [ 888.899513] env[61962]: _type = "Task" [ 888.899513] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.911471] env[61962]: DEBUG oslo_vmware.api [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4891976, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.323952] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 889.324313] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Creating directory with path [datastore2] vmware_temp/2c9c38f8-c95e-4bc7-8b63-b663ffab8965/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 889.324500] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-039c39b7-de98-4a60-a81e-cafa2f7b9734 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.353514] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Created directory with path [datastore2] vmware_temp/2c9c38f8-c95e-4bc7-8b63-b663ffab8965/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 889.353760] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Fetch image to [datastore2] vmware_temp/2c9c38f8-c95e-4bc7-8b63-b663ffab8965/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 889.353934] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/2c9c38f8-c95e-4bc7-8b63-b663ffab8965/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 889.354767] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4caf869e-777b-4346-ae48-afa1d1adc05a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.362592] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8936b50d-7954-461b-8bdf-472692d6e353 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.372584] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541a48c6-4fd4-47a1-89e8-ca253f1dfc83 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.409045] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e4e47a-0f06-4f2e-8d0d-4eed444e39a4 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.418533] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-65625a0e-1f73-424b-a597-6e6cd243521c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.420369] env[61962]: DEBUG oslo_vmware.api [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4891976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086845} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.420657] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.420842] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 889.421078] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 889.421198] env[61962]: INFO nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Took 0.63 seconds to destroy the instance on the hypervisor. 
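The CopyVirtualDisk_Task failure above surfaces through oslo_vmware's task-polling loop: wait_for_task blocks while a looping call polls the task's state, and an error state is translated into the VimFaultException ("A specified parameter was not correct: fileType") seen in the traceback. A minimal sketch of that pattern follows; poll_task_info, its dict shape, and this VimFaultException stand-in are illustrative assumptions, not the real oslo_vmware API.

import time


class VimFaultException(Exception):
    # Stand-in for oslo_vmware.exceptions.VimFaultException.
    def __init__(self, fault_list, msg):
        super().__init__(msg)
        self.fault_list = fault_list


def wait_for_task(poll_task_info, interval=0.5):
    # Poll the task on a fixed interval until it completes; on error,
    # translate the fault into an exception, mirroring the
    # "raise exceptions.translate_fault(task_info.error)" frame above.
    while True:
        info = poll_task_info()  # assumed shape: {'state', 'result', 'error', 'faults'}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise VimFaultException(info.get('faults', []), str(info['error']))
        time.sleep(interval)  # task still queued/running: poll again


# Example: a task that fails the way task-4891974 does in this log.
states = iter(['running', 'error'])
try:
    wait_for_task(lambda: {'state': next(states),
                           'error': 'A specified parameter was not correct: fileType',
                           'faults': ['InvalidArgument']}, interval=0.01)
except VimFaultException as exc:
    print(exc, exc.fault_list)  # -> ... fileType ['InvalidArgument']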
[ 889.423269] env[61962]: DEBUG nova.compute.claims [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 889.423436] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.423642] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.445582] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 889.505543] env[61962]: DEBUG oslo_vmware.rw_handles [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c9c38f8-c95e-4bc7-8b63-b663ffab8965/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 889.568862] env[61962]: DEBUG oslo_vmware.rw_handles [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 889.569075] env[61962]: DEBUG oslo_vmware.rw_handles [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c9c38f8-c95e-4bc7-8b63-b663ffab8965/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 889.903694] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824d2b30-bd18-466a-a16e-25395c014685 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.912109] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ae15f1-8416-43b8-a94b-220b49f2c2f4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.944215] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93460c5-7669-471d-a305-ca1a2d1bf75c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.952604] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7523bbb3-5580-4b6f-bf00-bb6c95c602c5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.966679] env[61962]: DEBUG nova.compute.provider_tree [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.975860] env[61962]: DEBUG nova.scheduler.client.report [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 889.994536] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.571s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.995147] env[61962]: ERROR nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 889.995147] env[61962]: Faults: ['InvalidArgument'] [ 889.995147] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Traceback (most recent call last): [ 889.995147] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 889.995147] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] 
self.driver.spawn(context, instance, image_meta, [ 889.995147] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 889.995147] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 889.995147] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 889.995147] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] self._fetch_image_if_missing(context, vi) [ 889.995147] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 889.995147] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] image_cache(vi, tmp_image_ds_loc) [ 889.995147] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] vm_util.copy_virtual_disk( [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] session._wait_for_task(vmdk_copy_task) [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] return self.wait_for_task(task_ref) [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] return evt.wait() [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] result = hub.switch() [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] return self.greenlet.switch() [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 889.995873] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] self.f(*self.args, **self.kw) [ 889.996587] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 889.996587] 
env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] raise exceptions.translate_fault(task_info.error) [ 889.996587] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 889.996587] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Faults: ['InvalidArgument'] [ 889.996587] env[61962]: ERROR nova.compute.manager [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] [ 889.996587] env[61962]: DEBUG nova.compute.utils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 889.997451] env[61962]: DEBUG nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Build of instance d0df932e-822b-4c60-a588-445aa2f14cdb was re-scheduled: A specified parameter was not correct: fileType [ 889.997451] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 889.997820] env[61962]: DEBUG nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 889.997995] env[61962]: DEBUG nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 889.998194] env[61962]: DEBUG nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 889.998361] env[61962]: DEBUG nova.network.neutron [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 890.340227] env[61962]: DEBUG nova.network.neutron [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.356908] env[61962]: INFO nova.compute.manager [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Took 0.36 seconds to deallocate network for instance.
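The 'Acquiring lock "compute_resources" ... waited 0.000s' and 'released ... held 0.571s' lines above come from oslo_concurrency.lockutils, which times how long a caller waited to acquire a named lock and how long it held it before release. A minimal sketch of that pattern, assuming a process-local lock registry; the lock() helper and _LOCKS map are illustrative stand-ins, not the lockutils API.

import contextlib
import logging
import threading
import time

LOG = logging.getLogger(__name__)
_LOCKS = {}  # name -> threading.Lock(); assumed process-local registry


@contextlib.contextmanager
def lock(name):
    # Log wait/hold durations around a named lock, in the spirit of the
    # lockutils 'acquired :: waited' / 'released :: held' lines above.
    lk = _LOCKS.setdefault(name, threading.Lock())
    start = time.monotonic()
    lk.acquire()
    acquired = time.monotonic()
    LOG.debug('Lock "%s" acquired :: waited %.3fs', name, acquired - start)
    try:
        yield
    finally:
        lk.release()
        LOG.debug('Lock "%s" released :: held %.3fs',
                  name, time.monotonic() - acquired)


# Usage mirroring the resource tracker: claim and abort paths serialize
# on the same named lock, so only one of them mutates resources at a time.
with lock("compute_resources"):
    pass  # instance_claim / abort_instance_claim work would run here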
[ 890.473197] env[61962]: INFO nova.scheduler.client.report [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleted allocations for instance d0df932e-822b-4c60-a588-445aa2f14cdb [ 890.496831] env[61962]: DEBUG oslo_concurrency.lockutils [None req-911f86c7-75e1-4661-8b4b-23bb532c1f1c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d0df932e-822b-4c60-a588-445aa2f14cdb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 338.207s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.498207] env[61962]: DEBUG oslo_concurrency.lockutils [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d0df932e-822b-4c60-a588-445aa2f14cdb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 138.922s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.499083] env[61962]: DEBUG oslo_concurrency.lockutils [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "d0df932e-822b-4c60-a588-445aa2f14cdb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.499083] env[61962]: DEBUG oslo_concurrency.lockutils [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d0df932e-822b-4c60-a588-445aa2f14cdb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.499083] env[61962]: DEBUG oslo_concurrency.lockutils [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d0df932e-822b-4c60-a588-445aa2f14cdb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.501183] env[61962]: INFO nova.compute.manager [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Terminating instance [ 890.503436] env[61962]: DEBUG nova.compute.manager [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 890.503436] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 890.504198] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f3ae931b-11ee-4de6-bb42-078469c3529a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.510454] env[61962]: DEBUG nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 890.517665] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b03d70-a4f4-467b-8808-c11d28bbc9d4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.550507] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d0df932e-822b-4c60-a588-445aa2f14cdb could not be found. [ 890.550507] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 890.550710] env[61962]: INFO nova.compute.manager [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Took 0.05 seconds to destroy the instance on the hypervisor. [ 890.550937] env[61962]: DEBUG oslo.service.loopingcall [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.554025] env[61962]: DEBUG nova.compute.manager [-] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 890.554025] env[61962]: DEBUG nova.network.neutron [-] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 890.572796] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.573042] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.574884] env[61962]: INFO nova.compute.claims [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.587286] env[61962]: DEBUG nova.network.neutron [-] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.597196] env[61962]: INFO nova.compute.manager [-] [instance: d0df932e-822b-4c60-a588-445aa2f14cdb] Took 0.04 seconds to deallocate network for instance. 
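The claim above succeeds against the inventory this log reports for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1: schedulable capacity per resource class is (total - reserved) * allocation_ratio, with any single allocation further capped at max_unit. A small worked check using the exact numbers from the report lines; the capacity() helper is illustrative, not a placement API.

# Inventory as logged for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1.
INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 96},
}


def capacity(rc):
    # Schedulable capacity = (total - reserved) * allocation_ratio.
    inv = INVENTORY[rc]
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])


for rc in INVENTORY:
    print(rc, capacity(rc))
# VCPU 192, MEMORY_MB 196078, DISK_GB 200 -- ample headroom for an
# m1.nano (1 VCPU / 128 MB / 1 GB root disk, per the flavor dump below),
# so the claim on node domain-c8.fc996f14-... goes through.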
[ 890.757369] env[61962]: DEBUG oslo_concurrency.lockutils [None req-59faea55-6bf8-49ed-a131-74eebe70678f tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d0df932e-822b-4c60-a588-445aa2f14cdb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.259s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.016837] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6440bae-b76c-406b-9888-37f3504ac154 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.026138] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2ce1f7-f95f-4ca2-ae5b-614ba7207147 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.057299] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9660de-055d-479b-a8d8-6b072b77fb24 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.065329] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a2fb06-1c15-4939-85ff-843120be6821 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.079742] env[61962]: DEBUG nova.compute.provider_tree [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.088808] env[61962]: DEBUG nova.scheduler.client.report [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 891.104017] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.531s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.104557] env[61962]: DEBUG nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Start building networks asynchronously for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 891.138357] env[61962]: DEBUG nova.compute.utils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 891.139836] env[61962]: DEBUG nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 891.140014] env[61962]: DEBUG nova.network.neutron [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 891.149052] env[61962]: DEBUG nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 891.211955] env[61962]: DEBUG nova.policy [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8931cb2485949fda33b47e892f9743c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7738f0dd32484bf9978c5d006d088c3b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 891.221464] env[61962]: DEBUG nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 891.248344] env[61962]: DEBUG nova.virt.hardware [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 891.249487] env[61962]: DEBUG nova.virt.hardware [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 891.249487] env[61962]: DEBUG nova.virt.hardware [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.249487] env[61962]: DEBUG nova.virt.hardware [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 891.249487] env[61962]: DEBUG nova.virt.hardware [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.249487] env[61962]: DEBUG nova.virt.hardware [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 891.249781] env[61962]: DEBUG nova.virt.hardware [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 891.249781] env[61962]: DEBUG nova.virt.hardware [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 891.249920] env[61962]: DEBUG nova.virt.hardware [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 
tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 891.250094] env[61962]: DEBUG nova.virt.hardware [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 891.250272] env[61962]: DEBUG nova.virt.hardware [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 891.251134] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100161ce-cd9b-43be-82cc-be035091f552 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.259849] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572ab3a7-401b-43cc-91c9-a78ca29b2dd3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.741021] env[61962]: DEBUG nova.network.neutron [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Successfully created port: 527f3dcd-81df-4e36-a184-7f05a42baab4 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 891.931715] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.932403] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.539488] env[61962]: DEBUG nova.compute.manager [req-bbb96d74-dc7d-42e4-9712-bd1bbdfd4329 req-8919b627-f3b1-41f1-a2b0-a8bf0097cdb7 service nova] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Received event network-vif-plugged-527f3dcd-81df-4e36-a184-7f05a42baab4 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 892.539702] env[61962]: DEBUG oslo_concurrency.lockutils [req-bbb96d74-dc7d-42e4-9712-bd1bbdfd4329 req-8919b627-f3b1-41f1-a2b0-a8bf0097cdb7 service nova] Acquiring lock "1dcafe3b-989b-4290-be54-ceccc62a323f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.539901] env[61962]: DEBUG 
oslo_concurrency.lockutils [req-bbb96d74-dc7d-42e4-9712-bd1bbdfd4329 req-8919b627-f3b1-41f1-a2b0-a8bf0097cdb7 service nova] Lock "1dcafe3b-989b-4290-be54-ceccc62a323f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.540125] env[61962]: DEBUG oslo_concurrency.lockutils [req-bbb96d74-dc7d-42e4-9712-bd1bbdfd4329 req-8919b627-f3b1-41f1-a2b0-a8bf0097cdb7 service nova] Lock "1dcafe3b-989b-4290-be54-ceccc62a323f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.540249] env[61962]: DEBUG nova.compute.manager [req-bbb96d74-dc7d-42e4-9712-bd1bbdfd4329 req-8919b627-f3b1-41f1-a2b0-a8bf0097cdb7 service nova] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] No waiting events found dispatching network-vif-plugged-527f3dcd-81df-4e36-a184-7f05a42baab4 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 892.540409] env[61962]: WARNING nova.compute.manager [req-bbb96d74-dc7d-42e4-9712-bd1bbdfd4329 req-8919b627-f3b1-41f1-a2b0-a8bf0097cdb7 service nova] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Received unexpected event network-vif-plugged-527f3dcd-81df-4e36-a184-7f05a42baab4 for instance with vm_state building and task_state spawning. [ 892.694259] env[61962]: DEBUG nova.network.neutron [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Successfully updated port: 527f3dcd-81df-4e36-a184-7f05a42baab4 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.706897] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquiring lock "refresh_cache-1dcafe3b-989b-4290-be54-ceccc62a323f" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.706897] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquired lock "refresh_cache-1dcafe3b-989b-4290-be54-ceccc62a323f" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.707144] env[61962]: DEBUG nova.network.neutron [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 892.777836] env[61962]: DEBUG nova.network.neutron [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Instance cache missing network info. 
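[editor's note] The entries above show the external-event path: a network-vif-plugged event arrives, the per-instance "<uuid>-events" lock is held just long enough to pop any waiter, and because the instance is still building nothing is waiting, so the event is logged as unexpected. A minimal sketch of that pattern follows, using plain stdlib threading and simplified, hypothetical names rather than Nova's actual classes:

    import threading

    class InstanceEvents:
        def __init__(self):
            self._events = {}   # {instance_uuid: {event_name: threading.Event}}
            self._lock = threading.Lock()

        def pop_instance_event(self, instance_uuid, event_name):
            # Mirrors the acquired/released pair in the log: the lock is held
            # only to pop the waiter, never across the dispatch itself.
            with self._lock:
                waiters = self._events.get(instance_uuid, {})
                return waiters.pop(event_name, None)

        def dispatch(self, instance_uuid, event_name):
            waiter = self.pop_instance_event(instance_uuid, event_name)
            if waiter is None:
                # Corresponds to the WARNING above: the event arrived while
                # the instance was still building and nothing was waiting.
                print("unexpected event %s for %s" % (event_name, instance_uuid))
            else:
                waiter.set()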
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 893.013934] env[61962]: DEBUG nova.network.neutron [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Updating instance_info_cache with network_info: [{"id": "527f3dcd-81df-4e36-a184-7f05a42baab4", "address": "fa:16:3e:20:54:26", "network": {"id": "7b807b78-d526-4afc-8d2b-b41e0ca6829d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1306795093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7738f0dd32484bf9978c5d006d088c3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap527f3dcd-81", "ovs_interfaceid": "527f3dcd-81df-4e36-a184-7f05a42baab4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.026566] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Releasing lock "refresh_cache-1dcafe3b-989b-4290-be54-ceccc62a323f" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.026970] env[61962]: DEBUG nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Instance network_info: |[{"id": "527f3dcd-81df-4e36-a184-7f05a42baab4", "address": "fa:16:3e:20:54:26", "network": {"id": "7b807b78-d526-4afc-8d2b-b41e0ca6829d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1306795093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7738f0dd32484bf9978c5d006d088c3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap527f3dcd-81", "ovs_interfaceid": "527f3dcd-81df-4e36-a184-7f05a42baab4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 893.027693] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:54:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7cd4cea-788c-4e6d-9df8-5d83838e2e2a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '527f3dcd-81df-4e36-a184-7f05a42baab4', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 893.035690] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Creating folder: Project (7738f0dd32484bf9978c5d006d088c3b). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 893.036363] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abaf94fe-939c-4fa3-a48c-3f42c6903304 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.047409] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Created folder: Project (7738f0dd32484bf9978c5d006d088c3b) in parent group-v953327. [ 893.047694] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Creating folder: Instances. Parent ref: group-v953378. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 893.047884] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0857ffa-89d3-45c3-8189-add5e279c4ac {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.059872] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Created folder: Instances in parent group-v953378. [ 893.060147] env[61962]: DEBUG oslo.service.loopingcall [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 893.060340] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 893.060547] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8aae26a7-d298-4f7d-9d75-c1c3c92299e7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.080773] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 893.080773] env[61962]: value = "task-4891979" [ 893.080773] env[61962]: _type = "Task" [ 893.080773] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.088722] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891979, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.591132] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891979, 'name': CreateVM_Task, 'duration_secs': 0.29934} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.591310] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 893.591979] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.592189] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.592499] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 893.592758] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b49522af-5066-4d65-a5e9-6e4908e4d1fe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.597145] env[61962]: DEBUG oslo_vmware.api [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Waiting for the task: (returnval){ [ 893.597145] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52f8a153-5b60-243f-ad52-8cbea1b16c1c" [ 893.597145] env[61962]: _type = "Task" [ 893.597145] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.604532] env[61962]: DEBUG oslo_vmware.api [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52f8a153-5b60-243f-ad52-8cbea1b16c1c, 'name': SearchDatastore_Task} progress is 0%. 
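[editor's note] The CreateVM_Task lines above show the shape of oslo.vmware's task handling: the API call returns a task handle, progress is polled (0% here), and the completion entry reports duration_secs. The loop below is a generic sketch of that shape, assuming a hypothetical get_task_info() callable; it is not oslo.vmware's implementation:

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()   # e.g. {'state': 'running', 'progress': 0}
            if info['state'] == 'success':
                return info          # the log reports duration_secs at this point
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(interval)     # progress is re-logged on each poll
        raise TimeoutError('task did not complete in %.0fs' % timeout)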
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.108830] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.109108] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 894.109286] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.630120] env[61962]: DEBUG nova.compute.manager [req-c4edeff8-6e60-4f81-a493-fbb14db01b81 req-10286f47-49f8-456f-ade0-9c25471f96ae service nova] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Received event network-changed-527f3dcd-81df-4e36-a184-7f05a42baab4 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 894.630120] env[61962]: DEBUG nova.compute.manager [req-c4edeff8-6e60-4f81-a493-fbb14db01b81 req-10286f47-49f8-456f-ade0-9c25471f96ae service nova] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Refreshing instance network info cache due to event network-changed-527f3dcd-81df-4e36-a184-7f05a42baab4. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 894.630267] env[61962]: DEBUG oslo_concurrency.lockutils [req-c4edeff8-6e60-4f81-a493-fbb14db01b81 req-10286f47-49f8-456f-ade0-9c25471f96ae service nova] Acquiring lock "refresh_cache-1dcafe3b-989b-4290-be54-ceccc62a323f" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.630324] env[61962]: DEBUG oslo_concurrency.lockutils [req-c4edeff8-6e60-4f81-a493-fbb14db01b81 req-10286f47-49f8-456f-ade0-9c25471f96ae service nova] Acquired lock "refresh_cache-1dcafe3b-989b-4290-be54-ceccc62a323f" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.630490] env[61962]: DEBUG nova.network.neutron [req-c4edeff8-6e60-4f81-a493-fbb14db01b81 req-10286f47-49f8-456f-ade0-9c25471f96ae service nova] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Refreshing network info cache for port 527f3dcd-81df-4e36-a184-7f05a42baab4 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 895.208867] env[61962]: DEBUG nova.network.neutron [req-c4edeff8-6e60-4f81-a493-fbb14db01b81 req-10286f47-49f8-456f-ade0-9c25471f96ae service nova] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Updated VIF entry in instance network info cache for port 527f3dcd-81df-4e36-a184-7f05a42baab4. 
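[editor's note] The network-changed handler above serializes on a "refresh_cache-<uuid>" lock before rebuilding the cache entry, so concurrent event handlers for the same instance cannot interleave. A sketch of that lock discipline, using oslo.concurrency's named locks (the same lockutils module the log cites) with a plain dict standing in for Nova's cache and a hypothetical fetch_port_info callable:

    from oslo_concurrency import lockutils

    _nw_cache = {}

    def refresh_network_cache(instance_uuid, fetch_port_info):
        # One lock per instance cache entry, following the naming convention
        # visible in the log: "refresh_cache-<instance uuid>".
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            _nw_cache[instance_uuid] = fetch_port_info(instance_uuid)
            return _nw_cache[instance_uuid]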
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 895.209265] env[61962]: DEBUG nova.network.neutron [req-c4edeff8-6e60-4f81-a493-fbb14db01b81 req-10286f47-49f8-456f-ade0-9c25471f96ae service nova] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Updating instance_info_cache with network_info: [{"id": "527f3dcd-81df-4e36-a184-7f05a42baab4", "address": "fa:16:3e:20:54:26", "network": {"id": "7b807b78-d526-4afc-8d2b-b41e0ca6829d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1306795093-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7738f0dd32484bf9978c5d006d088c3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap527f3dcd-81", "ovs_interfaceid": "527f3dcd-81df-4e36-a184-7f05a42baab4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.219842] env[61962]: DEBUG oslo_concurrency.lockutils [req-c4edeff8-6e60-4f81-a493-fbb14db01b81 req-10286f47-49f8-456f-ade0-9c25471f96ae service nova] Releasing lock "refresh_cache-1dcafe3b-989b-4290-be54-ceccc62a323f" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.829308] env[61962]: DEBUG oslo_concurrency.lockutils [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquiring lock "1dcafe3b-989b-4290-be54-ceccc62a323f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.160139] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.160139] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
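[editor's note] The instance_info_cache payloads logged above are JSON-serializable network_info lists. This snippet parses a trimmed copy of the blob from the log and pulls out the fields a reader usually wants (port id, MAC, fixed IPs):

    import json

    network_info = json.loads('''[{"id": "527f3dcd-81df-4e36-a184-7f05a42baab4",
        "address": "fa:16:3e:20:54:26",
        "network": {"subnets": [{"cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.9", "type": "fixed"}]}]},
        "devname": "tap527f3dcd-81", "vnic_type": "normal"}]''')

    for vif in network_info:
        fixed = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips'] if ip['type'] == 'fixed']
        print(vif['id'], vif['address'], fixed)   # port id, MAC, fixed IPs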
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 921.160139] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.173016] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.173271] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.173447] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.173607] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 921.175630] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3518a4-783e-4268-800a-8841ce3717dc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.185216] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce28be3-1b85-4c61-bbc1-7e95df6aa7df {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.200426] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61448844-fdbb-4e32-810b-5579f5478906 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.208398] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1dbc15-4c12-4808-a61b-41e3e7d149d9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.239688] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180553MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 921.239852] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.240051] 
env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.330823] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a6e6f30d-c667-4f70-bf60-596a8fe97188 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 921.330984] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b9782756-27e1-46fc-940f-069cfb9ab9f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 921.331125] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 667f56c0-896f-4e25-9e8a-3954f7733dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 921.331248] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 295064f0-9677-4287-877a-ef2a33a87fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 921.331365] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6996289c-f881-4733-8943-98e3633e43ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 921.331480] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 921.331596] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 921.331708] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
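[editor's note] Each allocation dict above ({'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}) is exactly what the m1.nano flavor seen earlier (vcpus=1, memory_mb=128, root_gb=1, ephemeral_gb=0, swap=0) maps to. A sketch of that mapping, with simplified attribute names and an assumed ceil-to-GB rule for swap:

    def flavor_to_resources(vcpus, memory_mb, root_gb, ephemeral_gb=0, swap_mb=0):
        disk_gb = root_gb + ephemeral_gb + (swap_mb + 1023) // 1024
        return {'resources': {'VCPU': vcpus,
                              'MEMORY_MB': memory_mb,
                              'DISK_GB': disk_gb}}

    assert flavor_to_resources(1, 128, 1) == \
        {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}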
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 921.331822] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 921.331933] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1dcafe3b-989b-4290-be54-ceccc62a323f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 921.348140] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.359250] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.372020] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c17b6733-edea-4db9-b937-c38585eb92bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.383810] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1228fef1-3837-4a1b-8c88-fc1e27af6b4f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.394866] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 66eb1344-b692-4f1c-a5ac-b3518e350741 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.404983] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b33ecac7-cb2f-438c-9ee4-7cd53388e318 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.414933] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 252f4add-476d-46bc-b369-c8d66a512142 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.425293] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 647d141e-1aaf-4e5e-b0a3-31c9bee0adf7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.435320] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance fb33811b-242a-4cce-ae11-c004e507e7de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.445749] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c5eca2f9-f569-4510-882b-217c2c28ec69 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.456158] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 684d7213-d30a-4881-8e13-9817d6765074 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.467377] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 9cf159b3-53c8-4002-9e9b-5c0f82e1e555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.477598] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 306d093c-40fa-4863-b725-b3fc65042dce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.488948] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance f53ed31d-4cca-418d-b708-4797dbc5920f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.501274] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8ecd73a7-7f23-4994-85b9-479584831687 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.510921] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
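[editor's note] The run of entries above shows the resource tracker walking placement allocations and keeping two classes of them: instances actively managed on this host, and instances scheduled here that have yet to start ("Skipping heal of allocation"). A deliberately simplified decision sketch; the real logic in _remove_deleted_instances_allocations covers more states:

    def classify_allocation(instance_is_on_host, instance_has_started):
        if instance_is_on_host:
            return 'keep: actively managed on this compute host'
        if not instance_has_started:
            return 'keep: scheduled here but not started, skip heal'
        return 'candidate for removal'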
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 921.511276] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 921.511627] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '24', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_6e7e747becdf4976b570b79d4b20d88b': '1', 'io_workload': '10', 'num_proj_a7b748ae1e6d4e17bc463811008c5596': '1', 'num_proj_6c00b1a76e464ad2a9788673e256de22': '1', 'num_proj_78e44aa9cd5642928627210447b3e177': '1', 'num_proj_b4cf0a2155644882b4bf42e69e210138': '1', 'num_proj_ed943c69c73d48e2b1b2bab311295d84': '2', 'num_proj_5481169540de4205a1ff7731132eb3a7': '1', 'num_proj_389fb8d4c2ce47829fe23a6ca60e9bdd': '1', 'num_proj_7738f0dd32484bf9978c5d006d088c3b': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 921.834904] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e455f25a-7d09-4b9b-82b3-2770fd9cb107 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.842859] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7a9d7a-829c-4d5a-b181-5268c6ac5357 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.873861] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd657730-c70c-4b7b-a1a7-92c4efd9092e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.881807] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1543075-0650-410d-be1d-bc23da923088 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.896071] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.904969] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 921.920398] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 921.920792] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.681s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.921445] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.154040] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.158869] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.159205] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 925.159300] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 925.182582] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 925.182743] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 925.182876] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 925.183020] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 925.183146] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 925.183267] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 925.183388] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 925.183506] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 925.183623] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 925.183739] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 925.183964] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
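[editor's note] The "Final resource view" logged a little earlier is internally consistent: ten m1.nano guests (128MB RAM, 1GB disk, 1 vCPU each) plus the 512MB reserved in the MEMORY_MB inventory reproduce used_ram/used_disk/used_vcpus exactly. A quick cross-check:

    instances, ram_mb, disk_gb, vcpus = 10, 128, 1, 1
    reserved_ram_mb = 512

    assert reserved_ram_mb + instances * ram_mb == 1792   # used_ram=1792MB
    assert instances * disk_gb == 10                      # used_disk=10GB
    assert instances * vcpus == 10                        # used_vcpus=10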
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 925.184391] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.158380] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.158310] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.158670] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 936.850866] env[61962]: WARNING oslo_vmware.rw_handles [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 936.850866] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 936.850866] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 936.850866] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 936.850866] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 936.850866] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 936.850866] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 936.850866] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 936.850866] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 936.850866] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 936.850866] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 936.850866] env[61962]: ERROR oslo_vmware.rw_handles [ 936.850866] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/2c9c38f8-c95e-4bc7-8b63-b663ffab8965/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 936.852314] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Caching image 
{{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 936.852564] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Copying Virtual Disk [datastore2] vmware_temp/2c9c38f8-c95e-4bc7-8b63-b663ffab8965/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/2c9c38f8-c95e-4bc7-8b63-b663ffab8965/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 936.852844] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-465ac86b-1729-40ac-ba23-3a314c623481 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.862078] env[61962]: DEBUG oslo_vmware.api [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Waiting for the task: (returnval){ [ 936.862078] env[61962]: value = "task-4891980" [ 936.862078] env[61962]: _type = "Task" [ 936.862078] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.871261] env[61962]: DEBUG oslo_vmware.api [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Task: {'id': task-4891980, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.375641] env[61962]: DEBUG oslo_vmware.exceptions [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Fault InvalidArgument not matched. 
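[editor's note] "Fault InvalidArgument not matched" above is the fault-translation step: the vSphere fault name is looked up against known exception classes and, finding none, a generic VimFaultException carrying the fault list is raised (the traceback that follows shows it surfacing through wait_for_task). A sketch of that lookup-with-fallback shape; the registry and class names here are hypothetical, not oslo.vmware's:

    class VimFault(Exception):
        def __init__(self, msg, fault_list):
            super().__init__(msg)
            self.fault_list = fault_list

    _FAULT_CLASSES = {}   # e.g. {'FileNotFound': FileNotFoundFault, ...}

    def translate_fault(fault_name, msg):
        cls = _FAULT_CLASSES.get(fault_name)
        if cls is None:
            # "not matched" in the log: no specific class registered, so
            # return the generic fault carrying the original fault name.
            return VimFault(msg, [fault_name])
        return cls(msg)

    err = translate_fault('InvalidArgument',
                          'A specified parameter was not correct: fileType')
    print(type(err).__name__, err.fault_list)   # VimFault ['InvalidArgument']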
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 937.375976] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.376534] env[61962]: ERROR nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 937.376534] env[61962]: Faults: ['InvalidArgument'] [ 937.376534] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Traceback (most recent call last): [ 937.376534] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 937.376534] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] yield resources [ 937.376534] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 937.376534] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] self.driver.spawn(context, instance, image_meta, [ 937.376534] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 937.376534] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 937.376534] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 937.376534] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] self._fetch_image_if_missing(context, vi) [ 937.376534] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] image_cache(vi, tmp_image_ds_loc) [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] vm_util.copy_virtual_disk( [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] session._wait_for_task(vmdk_copy_task) [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] return self.wait_for_task(task_ref) [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] return evt.wait() [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] result = hub.switch() [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 937.376992] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] return self.greenlet.switch() [ 937.377453] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 937.377453] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] self.f(*self.args, **self.kw) [ 937.377453] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 937.377453] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] raise exceptions.translate_fault(task_info.error) [ 937.377453] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 937.377453] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Faults: ['InvalidArgument'] [ 937.377453] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] [ 937.377453] env[61962]: INFO nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Terminating instance [ 937.378513] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.378762] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 937.378979] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03a57396-f3d3-4ce0-bd98-0c0066fc7b65 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
937.381167] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquiring lock "refresh_cache-b9782756-27e1-46fc-940f-069cfb9ab9f9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.381281] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquired lock "refresh_cache-b9782756-27e1-46fc-940f-069cfb9ab9f9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.381448] env[61962]: DEBUG nova.network.neutron [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 937.389301] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 937.389498] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 937.390772] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d84c9bd-1013-4b57-ac33-2b4ea77dad0d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.399220] env[61962]: DEBUG oslo_vmware.api [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Waiting for the task: (returnval){ [ 937.399220] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]522cb525-c78d-bda9-dbea-43a0af1ddf2c" [ 937.399220] env[61962]: _type = "Task" [ 937.399220] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.408122] env[61962]: DEBUG oslo_vmware.api [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]522cb525-c78d-bda9-dbea-43a0af1ddf2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.417414] env[61962]: DEBUG nova.network.neutron [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 937.709729] env[61962]: DEBUG nova.network.neutron [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.720434] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Releasing lock "refresh_cache-b9782756-27e1-46fc-940f-069cfb9ab9f9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.720434] env[61962]: DEBUG nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 937.720434] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 937.721358] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9a741f-cc50-4064-a7f8-8c66108e371f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.730233] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 937.730518] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4bd2300-cfe0-45ad-8646-cd5deee6d924 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.765798] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 937.766046] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 937.766241] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Deleting the datastore file [datastore2] b9782756-27e1-46fc-940f-069cfb9ab9f9 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.766505] env[61962]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b7b4e5a-c55a-4a2d-a96c-a94fb0b0d61c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.775022] env[61962]: DEBUG oslo_vmware.api [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Waiting for the task: (returnval){ [ 937.775022] env[61962]: value = "task-4891982" [ 937.775022] env[61962]: _type = "Task" [ 937.775022] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.781561] env[61962]: DEBUG oslo_vmware.api [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Task: {'id': task-4891982, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.909887] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 937.910169] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Creating directory with path [datastore2] vmware_temp/76fbdcac-53cf-4c97-a06d-173da068b810/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 937.910412] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f666c751-d5d4-48bb-838b-4afe26ff38cb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.922603] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Created directory with path [datastore2] vmware_temp/76fbdcac-53cf-4c97-a06d-173da068b810/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 937.922801] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Fetch image to [datastore2] vmware_temp/76fbdcac-53cf-4c97-a06d-173da068b810/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 937.922977] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/76fbdcac-53cf-4c97-a06d-173da068b810/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 937.923794] env[61962]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeea3295-ec1e-4ce1-b6d3-a56f14186bd4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.932029] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9300d899-9e7f-4a13-b21c-f0f2943b7a10 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.941694] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9948f225-fab4-4f9a-bfff-b8c79541594d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.973571] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36a22c8-ae4c-44e4-894f-007c84c659c5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.980436] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dabe8651-d5c7-4b9b-aeda-bcd3307ba8e1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.005696] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 938.064717] env[61962]: DEBUG oslo_vmware.rw_handles [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76fbdcac-53cf-4c97-a06d-173da068b810/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 938.128657] env[61962]: DEBUG oslo_vmware.rw_handles [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 938.129094] env[61962]: DEBUG oslo_vmware.rw_handles [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76fbdcac-53cf-4c97-a06d-173da068b810/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 938.285598] env[61962]: DEBUG oslo_vmware.api [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Task: {'id': task-4891982, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.047704} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.285844] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.285960] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 938.286148] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 938.286322] env[61962]: INFO nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Took 0.57 seconds to destroy the instance on the hypervisor. [ 938.286561] env[61962]: DEBUG oslo.service.loopingcall [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 938.286839] env[61962]: DEBUG nova.compute.manager [-] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 938.289085] env[61962]: DEBUG nova.compute.claims [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 938.289254] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.289463] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.719760] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9ea260-d1ff-410c-9292-3ea981d8d98c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.728674] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c35a1b9-c6b0-4c7b-b451-1f5c5f90781a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.761032] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee059901-0788-47bd-bea6-4314b05263cc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.769892] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3b4b2e-e0bb-4eb4-837e-a1df10975585 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.784468] env[61962]: DEBUG nova.compute.provider_tree [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.799021] env[61962]: DEBUG nova.scheduler.client.report [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 938.835774] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 
tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.546s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.836549] env[61962]: ERROR nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 938.836549] env[61962]: Faults: ['InvalidArgument'] [ 938.836549] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Traceback (most recent call last): [ 938.836549] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 938.836549] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] self.driver.spawn(context, instance, image_meta, [ 938.836549] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 938.836549] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 938.836549] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 938.836549] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] self._fetch_image_if_missing(context, vi) [ 938.836549] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 938.836549] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] image_cache(vi, tmp_image_ds_loc) [ 938.836549] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] vm_util.copy_virtual_disk( [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] session._wait_for_task(vmdk_copy_task) [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] return self.wait_for_task(task_ref) [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] return evt.wait() [ 938.836939] env[61962]: ERROR nova.compute.manager 
[instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] result = hub.switch() [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] return self.greenlet.switch() [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 938.836939] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] self.f(*self.args, **self.kw) [ 938.837250] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 938.837250] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] raise exceptions.translate_fault(task_info.error) [ 938.837250] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 938.837250] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Faults: ['InvalidArgument'] [ 938.837250] env[61962]: ERROR nova.compute.manager [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] [ 938.839053] env[61962]: DEBUG nova.compute.utils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 938.840084] env[61962]: DEBUG nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Build of instance b9782756-27e1-46fc-940f-069cfb9ab9f9 was re-scheduled: A specified parameter was not correct: fileType [ 938.840084] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 938.840513] env[61962]: DEBUG nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 938.840742] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquiring lock "refresh_cache-b9782756-27e1-46fc-940f-069cfb9ab9f9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.840889] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquired lock 
"refresh_cache-b9782756-27e1-46fc-940f-069cfb9ab9f9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.841066] env[61962]: DEBUG nova.network.neutron [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 938.872899] env[61962]: DEBUG nova.network.neutron [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 938.987369] env[61962]: DEBUG nova.network.neutron [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.998611] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Releasing lock "refresh_cache-b9782756-27e1-46fc-940f-069cfb9ab9f9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.998884] env[61962]: DEBUG nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 938.999150] env[61962]: DEBUG nova.compute.manager [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 939.107564] env[61962]: INFO nova.scheduler.client.report [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Deleted allocations for instance b9782756-27e1-46fc-940f-069cfb9ab9f9 [ 939.130401] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3f485ca9-0fd9-450d-ac9a-a3575d3edc94 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Lock "b9782756-27e1-46fc-940f-069cfb9ab9f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 372.358s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.131875] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Lock "b9782756-27e1-46fc-940f-069cfb9ab9f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 170.134s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.132272] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquiring lock "b9782756-27e1-46fc-940f-069cfb9ab9f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.132600] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Lock "b9782756-27e1-46fc-940f-069cfb9ab9f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.132972] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Lock "b9782756-27e1-46fc-940f-069cfb9ab9f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.135206] env[61962]: INFO nova.compute.manager [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Terminating instance [ 939.137053] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquiring lock "refresh_cache-b9782756-27e1-46fc-940f-069cfb9ab9f9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.137333] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Acquired lock "refresh_cache-b9782756-27e1-46fc-940f-069cfb9ab9f9" 
{{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.137594] env[61962]: DEBUG nova.network.neutron [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 939.144553] env[61962]: DEBUG nova.compute.manager [None req-af11b868-1c66-47f6-8ccb-134c532e2986 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 3bbd6215-436e-4d30-9c07-b3f69002f3bf] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 939.177200] env[61962]: DEBUG nova.network.neutron [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 939.179161] env[61962]: DEBUG nova.compute.manager [None req-af11b868-1c66-47f6-8ccb-134c532e2986 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 3bbd6215-436e-4d30-9c07-b3f69002f3bf] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 939.201656] env[61962]: DEBUG oslo_concurrency.lockutils [None req-af11b868-1c66-47f6-8ccb-134c532e2986 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "3bbd6215-436e-4d30-9c07-b3f69002f3bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 231.390s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.215693] env[61962]: DEBUG nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 939.280989] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.281271] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.282890] env[61962]: INFO nova.compute.claims [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.298192] env[61962]: DEBUG nova.network.neutron [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.312343] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Releasing lock "refresh_cache-b9782756-27e1-46fc-940f-069cfb9ab9f9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.312954] env[61962]: DEBUG nova.compute.manager [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 939.313311] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 939.314097] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5b7da0f-cad7-4230-bd69-1a1e483ce9dd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.327696] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512df926-cdd7-4050-9fc0-93b601aa7aa7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.374887] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b9782756-27e1-46fc-940f-069cfb9ab9f9 could not be found. [ 939.375106] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 939.375251] env[61962]: INFO nova.compute.manager [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Took 0.06 seconds to destroy the instance on the hypervisor. [ 939.375540] env[61962]: DEBUG oslo.service.loopingcall [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.375733] env[61962]: DEBUG nova.compute.manager [-] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 939.375832] env[61962]: DEBUG nova.network.neutron [-] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 939.401434] env[61962]: DEBUG nova.network.neutron [-] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 939.411904] env[61962]: DEBUG nova.network.neutron [-] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.423247] env[61962]: INFO nova.compute.manager [-] [instance: b9782756-27e1-46fc-940f-069cfb9ab9f9] Took 0.05 seconds to deallocate network for instance. 
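
Editorial note on the sequence above: the teardown is idempotent. The first destroy (task-4891982) already unregistered the VM and deleted its datastore files, so when do_terminate_instance walks the same path again, SearchIndex.FindAllByUuid finds nothing, vmops raises InstanceNotFound, and the driver logs a warning and continues as if the destroy succeeded. The following is a minimal, hedged sketch of that pattern in Python, not Nova's actual implementation; the session helper methods are hypothetical stand-ins for the SearchIndex.FindAllByUuid / VirtualMachine.UnregisterVM / FileManager.DeleteDatastoreFile_Task calls seen in the log.

import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_instance(session, instance_uuid):
    """Tear down a VM, treating 'already gone' as success."""
    try:
        # Hypothetical helpers; in the log these are vCenter SOAP calls.
        vm_ref = session.find_vm_by_uuid(instance_uuid)
        session.unregister_vm(vm_ref)
        session.delete_datastore_files(instance_uuid)
    except InstanceNotFound:
        # Mirrors the WARNING above: nothing on the backend means there
        # is nothing left to tear down, so destroy still counts as done.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.debug("Instance destroyed")


class _GoneSession:
    """Toy session whose VM lookup always fails, like the second teardown."""

    def find_vm_by_uuid(self, uuid):
        raise InstanceNotFound(uuid)

    def unregister_vm(self, vm_ref):
        pass

    def delete_datastore_files(self, uuid):
        pass


logging.basicConfig(level=logging.DEBUG)
destroy_instance(_GoneSession(), "b9782756-27e1-46fc-940f-069cfb9ab9f9")

Because the second teardown short-circuits this way, the terminate path holds the instance lock for only 0.426s, versus the 372s the original (failed and re-scheduled) build held it.
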
[ 939.558278] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b60d8127-8784-418b-a447-ff0e9406e304 tempest-ServersAdmin275Test-677477004 tempest-ServersAdmin275Test-677477004-project-member] Lock "b9782756-27e1-46fc-940f-069cfb9ab9f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.426s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.729850] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882528f7-5b2b-45a3-9dca-89da73ddb9d1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.739953] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9acfe45-393c-417e-884e-3bfa963c3cf1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.774661] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d60dc9-a0c7-4d1c-bee0-c55f0eca1ed4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.782470] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b868843a-2448-450d-b890-ffe13788e18b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.797133] env[61962]: DEBUG nova.compute.provider_tree [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.806213] env[61962]: DEBUG nova.scheduler.client.report [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 939.828491] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.547s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.828993] env[61962]: DEBUG nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Start building networks asynchronously for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 939.867547] env[61962]: DEBUG nova.compute.utils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 939.868942] env[61962]: DEBUG nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 939.869326] env[61962]: DEBUG nova.network.neutron [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 939.878968] env[61962]: DEBUG nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 939.941427] env[61962]: DEBUG nova.policy [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5adbe55e80cb4334a788f16c9e1a60c4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a31ea93dd586435ebb82c1dbfd14c2ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 939.948074] env[61962]: DEBUG nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 939.977260] env[61962]: DEBUG nova.virt.hardware [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=<?>,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-01T12:09:08Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 939.977496] env[61962]: DEBUG nova.virt.hardware [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 939.977664] env[61962]: DEBUG nova.virt.hardware [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.977963] env[61962]: DEBUG nova.virt.hardware [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 939.978708] env[61962]: DEBUG nova.virt.hardware [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.978920] env[61962]: DEBUG nova.virt.hardware [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 939.979288] env[61962]: DEBUG nova.virt.hardware [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 939.979515] env[61962]: DEBUG nova.virt.hardware [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 939.979714] env[61962]: DEBUG nova.virt.hardware [None 
req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 939.979928] env[61962]: DEBUG nova.virt.hardware [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 939.980135] env[61962]: DEBUG nova.virt.hardware [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 939.981123] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d094ec7-269d-44d3-b859-4f420e95a013 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.990465] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a7bb4e-2e80-4d3f-986c-f8cfe3b22801 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.449284] env[61962]: DEBUG nova.network.neutron [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Successfully created port: 6d51dce0-d3cf-4704-846f-a574c6967603 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 941.417179] env[61962]: DEBUG nova.network.neutron [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Successfully updated port: 6d51dce0-d3cf-4704-846f-a574c6967603 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 941.429090] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "refresh_cache-454eebcc-ea18-4046-ba2e-01de1c8a954a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.429263] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquired lock "refresh_cache-454eebcc-ea18-4046-ba2e-01de1c8a954a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.429415] env[61962]: DEBUG nova.network.neutron [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 941.502955] env[61962]: DEBUG nova.compute.manager [req-8160fccf-f9dd-4668-8e1e-140f41f29f41 
req-48bbd0bb-7fe9-44fd-9f82-6bc1c640eb2d service nova] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Received event network-vif-plugged-6d51dce0-d3cf-4704-846f-a574c6967603 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 941.502955] env[61962]: DEBUG oslo_concurrency.lockutils [req-8160fccf-f9dd-4668-8e1e-140f41f29f41 req-48bbd0bb-7fe9-44fd-9f82-6bc1c640eb2d service nova] Acquiring lock "454eebcc-ea18-4046-ba2e-01de1c8a954a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.502955] env[61962]: DEBUG oslo_concurrency.lockutils [req-8160fccf-f9dd-4668-8e1e-140f41f29f41 req-48bbd0bb-7fe9-44fd-9f82-6bc1c640eb2d service nova] Lock "454eebcc-ea18-4046-ba2e-01de1c8a954a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.502955] env[61962]: DEBUG oslo_concurrency.lockutils [req-8160fccf-f9dd-4668-8e1e-140f41f29f41 req-48bbd0bb-7fe9-44fd-9f82-6bc1c640eb2d service nova] Lock "454eebcc-ea18-4046-ba2e-01de1c8a954a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.504708] env[61962]: DEBUG nova.compute.manager [req-8160fccf-f9dd-4668-8e1e-140f41f29f41 req-48bbd0bb-7fe9-44fd-9f82-6bc1c640eb2d service nova] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] No waiting events found dispatching network-vif-plugged-6d51dce0-d3cf-4704-846f-a574c6967603 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 941.505109] env[61962]: WARNING nova.compute.manager [req-8160fccf-f9dd-4668-8e1e-140f41f29f41 req-48bbd0bb-7fe9-44fd-9f82-6bc1c640eb2d service nova] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Received unexpected event network-vif-plugged-6d51dce0-d3cf-4704-846f-a574c6967603 for instance with vm_state building and task_state spawning. [ 941.536848] env[61962]: DEBUG nova.network.neutron [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 941.820208] env[61962]: DEBUG nova.network.neutron [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Updating instance_info_cache with network_info: [{"id": "6d51dce0-d3cf-4704-846f-a574c6967603", "address": "fa:16:3e:de:52:65", "network": {"id": "e7c201f8-42a9-48f2-a499-92bcb4866e38", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-63616492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a31ea93dd586435ebb82c1dbfd14c2ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d51dce0-d3", "ovs_interfaceid": "6d51dce0-d3cf-4704-846f-a574c6967603", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.836382] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Releasing lock "refresh_cache-454eebcc-ea18-4046-ba2e-01de1c8a954a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.836682] env[61962]: DEBUG nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Instance network_info: |[{"id": "6d51dce0-d3cf-4704-846f-a574c6967603", "address": "fa:16:3e:de:52:65", "network": {"id": "e7c201f8-42a9-48f2-a499-92bcb4866e38", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-63616492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a31ea93dd586435ebb82c1dbfd14c2ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d51dce0-d3", "ovs_interfaceid": "6d51dce0-d3cf-4704-846f-a574c6967603", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 941.837101] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:52:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41278529-8bd2-44a1-97c8-03967faa3ff7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d51dce0-d3cf-4704-846f-a574c6967603', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 941.844465] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Creating folder: Project (a31ea93dd586435ebb82c1dbfd14c2ad). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 941.845009] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c86db14-1389-4014-adfb-a6825b6f43da {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.856916] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Created folder: Project (a31ea93dd586435ebb82c1dbfd14c2ad) in parent group-v953327. [ 941.857147] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Creating folder: Instances. Parent ref: group-v953381. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 941.857385] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fbdb958d-10f2-4609-8018-943698484b1c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.869574] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Created folder: Instances in parent group-v953381. [ 941.869901] env[61962]: DEBUG oslo.service.loopingcall [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.870018] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 941.870231] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad9690d9-38ce-416a-a733-f02a9c479988 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.891151] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.891151] env[61962]: value = "task-4891985" [ 941.891151] env[61962]: _type = "Task" [ 941.891151] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.899181] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891985, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.401359] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891985, 'name': CreateVM_Task} progress is 99%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.902059] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891985, 'name': CreateVM_Task} progress is 99%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.404295] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891985, 'name': CreateVM_Task, 'duration_secs': 1.346493} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.404480] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 943.405237] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.405401] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.405805] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 943.406169] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e816b23f-d27d-4692-bd14-ee45a70f224d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.411541] env[61962]: DEBUG oslo_vmware.api [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for the task: (returnval){ [ 943.411541] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52c5d735-aaf9-9c27-d073-6bf705a019a3" [ 943.411541] env[61962]: _type = "Task" [ 943.411541] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.420323] env[61962]: DEBUG oslo_vmware.api [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52c5d735-aaf9-9c27-d073-6bf705a019a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.920969] env[61962]: DEBUG nova.compute.manager [req-de65693c-4678-4d11-a68a-5a165913dcbe req-2880b733-d0d1-48e2-9cd0-b8913d142d9d service nova] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Received event network-changed-6d51dce0-d3cf-4704-846f-a574c6967603 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 943.921233] env[61962]: DEBUG nova.compute.manager [req-de65693c-4678-4d11-a68a-5a165913dcbe req-2880b733-d0d1-48e2-9cd0-b8913d142d9d service nova] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Refreshing instance network info cache due to event network-changed-6d51dce0-d3cf-4704-846f-a574c6967603. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 943.921417] env[61962]: DEBUG oslo_concurrency.lockutils [req-de65693c-4678-4d11-a68a-5a165913dcbe req-2880b733-d0d1-48e2-9cd0-b8913d142d9d service nova] Acquiring lock "refresh_cache-454eebcc-ea18-4046-ba2e-01de1c8a954a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.921555] env[61962]: DEBUG oslo_concurrency.lockutils [req-de65693c-4678-4d11-a68a-5a165913dcbe req-2880b733-d0d1-48e2-9cd0-b8913d142d9d service nova] Acquired lock "refresh_cache-454eebcc-ea18-4046-ba2e-01de1c8a954a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.921833] env[61962]: DEBUG nova.network.neutron [req-de65693c-4678-4d11-a68a-5a165913dcbe req-2880b733-d0d1-48e2-9cd0-b8913d142d9d service nova] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Refreshing network info cache for port 6d51dce0-d3cf-4704-846f-a574c6967603 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 943.930873] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.931461] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 943.931461] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
944.336597] env[61962]: DEBUG nova.network.neutron [req-de65693c-4678-4d11-a68a-5a165913dcbe req-2880b733-d0d1-48e2-9cd0-b8913d142d9d service nova] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Updated VIF entry in instance network info cache for port 6d51dce0-d3cf-4704-846f-a574c6967603. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 944.336597] env[61962]: DEBUG nova.network.neutron [req-de65693c-4678-4d11-a68a-5a165913dcbe req-2880b733-d0d1-48e2-9cd0-b8913d142d9d service nova] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Updating instance_info_cache with network_info: [{"id": "6d51dce0-d3cf-4704-846f-a574c6967603", "address": "fa:16:3e:de:52:65", "network": {"id": "e7c201f8-42a9-48f2-a499-92bcb4866e38", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-63616492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a31ea93dd586435ebb82c1dbfd14c2ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d51dce0-d3", "ovs_interfaceid": "6d51dce0-d3cf-4704-846f-a574c6967603", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.348492] env[61962]: DEBUG oslo_concurrency.lockutils [req-de65693c-4678-4d11-a68a-5a165913dcbe req-2880b733-d0d1-48e2-9cd0-b8913d142d9d service nova] Releasing lock "refresh_cache-454eebcc-ea18-4046-ba2e-01de1c8a954a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.583876] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquiring lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.584479] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.905137] env[61962]: DEBUG oslo_concurrency.lockutils [None req-091a91b0-398d-4b28-8365-8b8cb4b8e66d tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquiring lock "98c88a0d-07d4-4bec-8722-0afe732886ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.905440] env[61962]: DEBUG oslo_concurrency.lockutils [None req-091a91b0-398d-4b28-8365-8b8cb4b8e66d tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock "98c88a0d-07d4-4bec-8722-0afe732886ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.335307] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5481884c-a19d-4a9d-9904-b78ff198a034 tempest-ServerGroupTestJSON-904706614 tempest-ServerGroupTestJSON-904706614-project-member] Acquiring lock "b42bffa2-bf04-4287-9ae8-8cbbe107893a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.335596] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5481884c-a19d-4a9d-9904-b78ff198a034 tempest-ServerGroupTestJSON-904706614 tempest-ServerGroupTestJSON-904706614-project-member] Lock "b42bffa2-bf04-4287-9ae8-8cbbe107893a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.646689] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e0071317-e8e9-4b26-bfd6-1ee93b62c913 tempest-DeleteServersAdminTestJSON-710750285 tempest-DeleteServersAdminTestJSON-710750285-project-member] Acquiring lock "a47d03b4-71aa-43e9-bfa4-d2a07ea89fb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.650168] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e0071317-e8e9-4b26-bfd6-1ee93b62c913 tempest-DeleteServersAdminTestJSON-710750285 tempest-DeleteServersAdminTestJSON-710750285-project-member] Lock "a47d03b4-71aa-43e9-bfa4-d2a07ea89fb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.156028] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.158734] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.159068] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 981.159068] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.171255] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.171519] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.171615] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.171753] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 981.172906] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567253b8-5fdb-4997-9f06-d10f9f0ad040 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.188169] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0efdc96-55b9-46a2-9ee6-64a18f41d91a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.205339] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae70894-85f9-4844-82d6-6dc6425b9dc3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.216019] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5eab15f-ce6b-4926-8540-0224d53cc569 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.245971] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180574MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 981.246217] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.246337] 
env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.335665] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a6e6f30d-c667-4f70-bf60-596a8fe97188 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 981.335811] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 667f56c0-896f-4e25-9e8a-3954f7733dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 981.335934] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 295064f0-9677-4287-877a-ef2a33a87fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 981.336065] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6996289c-f881-4733-8943-98e3633e43ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 981.336180] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 981.336293] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 981.336412] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 981.336538] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 981.336632] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1dcafe3b-989b-4290-be54-ceccc62a323f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 981.336745] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 981.354020] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.367633] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c17b6733-edea-4db9-b937-c38585eb92bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.386149] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1228fef1-3837-4a1b-8c88-fc1e27af6b4f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.399895] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 66eb1344-b692-4f1c-a5ac-b3518e350741 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.413803] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b33ecac7-cb2f-438c-9ee4-7cd53388e318 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.429469] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 252f4add-476d-46bc-b369-c8d66a512142 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.446021] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 647d141e-1aaf-4e5e-b0a3-31c9bee0adf7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.462654] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance fb33811b-242a-4cce-ae11-c004e507e7de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.478202] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c5eca2f9-f569-4510-882b-217c2c28ec69 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.493177] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 684d7213-d30a-4881-8e13-9817d6765074 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.758254] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 9cf159b3-53c8-4002-9e9b-5c0f82e1e555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.772781] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 306d093c-40fa-4863-b725-b3fc65042dce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.786081] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance f53ed31d-4cca-418d-b708-4797dbc5920f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.801327] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8ecd73a7-7f23-4994-85b9-479584831687 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.814709] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.826552] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.839561] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 98c88a0d-07d4-4bec-8722-0afe732886ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.850223] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b42bffa2-bf04-4287-9ae8-8cbbe107893a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.863905] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a47d03b4-71aa-43e9-bfa4-d2a07ea89fb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 981.864249] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 981.864418] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '26', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_6e7e747becdf4976b570b79d4b20d88b': '1', 'io_workload': '10', 'num_proj_6c00b1a76e464ad2a9788673e256de22': '1', 'num_proj_78e44aa9cd5642928627210447b3e177': '1', 'num_proj_b4cf0a2155644882b4bf42e69e210138': '1', 'num_proj_ed943c69c73d48e2b1b2bab311295d84': '2', 'num_proj_5481169540de4205a1ff7731132eb3a7': '1', 'num_proj_389fb8d4c2ce47829fe23a6ca60e9bdd': '1', 'num_proj_7738f0dd32484bf9978c5d006d088c3b': '1', 'num_task_spawning': '1', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 982.352390] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2e205a-129e-4bb6-96a2-9958b03f8721 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.363763] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c0d8ab-b707-491e-b6ce-04096f79445e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.399298] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2829429-4f6a-45cd-b399-039f4df3c440 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.409632] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a0299d-b0b0-4951-81ca-3c8c64dbcef9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.422897] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.432948] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 982.451816] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 
None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 982.452218] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.206s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.296708] env[61962]: WARNING oslo_vmware.rw_handles [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 983.296708] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 983.296708] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 983.296708] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 983.296708] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 983.296708] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 983.296708] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 983.296708] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 983.296708] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 983.296708] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 983.296708] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 983.296708] env[61962]: ERROR oslo_vmware.rw_handles [ 983.297392] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/76fbdcac-53cf-4c97-a06d-173da068b810/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 983.299744] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 983.305344] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Copying Virtual Disk [datastore2] vmware_temp/76fbdcac-53cf-4c97-a06d-173da068b810/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/76fbdcac-53cf-4c97-a06d-173da068b810/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 983.305344] env[61962]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-941a0d6d-f438-49d0-b769-ed10ab02ce9b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.311396] env[61962]: DEBUG oslo_vmware.api [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Waiting for the task: (returnval){ [ 983.311396] env[61962]: value = "task-4891986" [ 983.311396] env[61962]: _type = "Task" [ 983.311396] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.322493] env[61962]: DEBUG oslo_vmware.api [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Task: {'id': task-4891986, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.609610] env[61962]: DEBUG oslo_concurrency.lockutils [None req-557bce10-54a3-4a5d-b0cf-bd9d351fb139 tempest-ServerAddressesTestJSON-194346181 tempest-ServerAddressesTestJSON-194346181-project-member] Acquiring lock "2a085633-9439-464b-b0c7-00e68ed46fb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.609967] env[61962]: DEBUG oslo_concurrency.lockutils [None req-557bce10-54a3-4a5d-b0cf-bd9d351fb139 tempest-ServerAddressesTestJSON-194346181 tempest-ServerAddressesTestJSON-194346181-project-member] Lock "2a085633-9439-464b-b0c7-00e68ed46fb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.822288] env[61962]: DEBUG oslo_vmware.exceptions [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 983.822579] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.823169] env[61962]: ERROR nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 983.823169] env[61962]: Faults: ['InvalidArgument'] [ 983.823169] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Traceback (most recent call last): [ 983.823169] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 983.823169] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] yield resources [ 983.823169] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 983.823169] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] self.driver.spawn(context, instance, image_meta, [ 983.823169] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 983.823169] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] self._vmops.spawn(context, instance, image_meta, injected_files, [ 983.823169] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 983.823169] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] self._fetch_image_if_missing(context, vi) [ 983.823169] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] image_cache(vi, tmp_image_ds_loc) [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] vm_util.copy_virtual_disk( [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] session._wait_for_task(vmdk_copy_task) [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 
983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] return self.wait_for_task(task_ref) [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] return evt.wait() [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] result = hub.switch() [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 983.823511] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] return self.greenlet.switch() [ 983.823846] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 983.823846] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] self.f(*self.args, **self.kw) [ 983.823846] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 983.823846] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] raise exceptions.translate_fault(task_info.error) [ 983.823846] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 983.823846] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Faults: ['InvalidArgument'] [ 983.823846] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] [ 983.823846] env[61962]: INFO nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Terminating instance [ 983.825265] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.825484] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.825877] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1781e09e-9f65-4a34-a0b1-4fbb9bb86101 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.828293] env[61962]: DEBUG nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 983.828492] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 983.829302] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040658f7-a832-4bec-b99c-5ed0ac86ac3e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.837240] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 983.837496] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d28568d8-7522-4d4a-b3ba-678d9869c187 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.840081] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.840263] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 983.841260] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8123533c-1358-41d9-b94b-ece7f4c4b690 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.847130] env[61962]: DEBUG oslo_vmware.api [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Waiting for the task: (returnval){ [ 983.847130] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52d5d292-c732-6204-7901-74e15c4ab891" [ 983.847130] env[61962]: _type = "Task" [ 983.847130] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 983.861074] env[61962]: DEBUG oslo_vmware.api [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52d5d292-c732-6204-7901-74e15c4ab891, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 983.925187] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 983.925187] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 983.925187] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Deleting the datastore file [datastore2] a6e6f30d-c667-4f70-bf60-596a8fe97188 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 983.925187] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-110802bd-ae7d-4836-8ccd-a7b1442e7a4f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 983.931784] env[61962]: DEBUG oslo_vmware.api [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Waiting for the task: (returnval){
[ 983.931784] env[61962]: value = "task-4891988"
[ 983.931784] env[61962]: _type = "Task"
[ 983.931784] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 983.940434] env[61962]: DEBUG oslo_vmware.api [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Task: {'id': task-4891988, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 984.357740] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 984.357740] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Creating directory with path [datastore2] vmware_temp/31fb1330-0f08-4bb2-8752-fb0e9ae57d01/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 984.358143] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54a8b712-3487-4e32-96f0-f6821d8008d4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 984.370894] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Created directory with path [datastore2] vmware_temp/31fb1330-0f08-4bb2-8752-fb0e9ae57d01/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 984.371117] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Fetch image to [datastore2] vmware_temp/31fb1330-0f08-4bb2-8752-fb0e9ae57d01/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 984.371297] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/31fb1330-0f08-4bb2-8752-fb0e9ae57d01/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 984.372082] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec434260-ce69-4658-a5d9-972b7c8bc4a7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 984.379588] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b58eb47-6a22-4ae9-b10d-a87cfb36bf19 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 984.388917] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f6327a-4226-4b46-86f4-b7f0a0ffe811 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 984.422934] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305b905c-8852-4993-8505-6180675e19ef {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 984.430187] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5db0e051-2a79-4f9f-9ad1-15bd6ddafa6e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 984.442062] env[61962]: DEBUG oslo_vmware.api [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Task: {'id': task-4891988, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074211} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 984.442062] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 984.442062] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 984.442280] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 984.442310] env[61962]: INFO nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Took 0.61 seconds to destroy the instance on the hypervisor.
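The delete flow above is the standard oslo.vmware invoke-then-poll pattern: the driver invokes FileManager.DeleteDatastoreFile_Task (task-4891988), then blocks in wait_for_task while _poll_task reports progress until the task completes. A minimal sketch of that pattern, assuming illustrative connection parameters and a pre-resolved datacenter ref (none of these values come from this log):

    from oslo_vmware import api

    # Hypothetical vCenter endpoint and credentials, for illustration only.
    session = api.VMwareAPISession('vc.example.test', 'admin', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    def delete_datastore_file(session, ds_path, dc_ref):
        # Start the vSphere task, then poll it to completion; wait_for_task
        # raises a translated exception if the task ends in an error state.
        vim = session.vim
        task = session.invoke_api(vim, 'DeleteDatastoreFile_Task',
                                  vim.service_content.fileManager,
                                  name=str(ds_path), datacenter=dc_ref)
        session.wait_for_task(task)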
[ 984.445090] env[61962]: DEBUG nova.compute.claims [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 984.445090] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 984.445327] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 984.453133] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 984.453133] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 984.459542] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 984.526812] env[61962]: DEBUG oslo_vmware.rw_handles [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/31fb1330-0f08-4bb2-8752-fb0e9ae57d01/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 984.586633] env[61962]: DEBUG oslo_vmware.rw_handles [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 984.586812] env[61962]: DEBUG oslo_vmware.rw_handles [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/31fb1330-0f08-4bb2-8752-fb0e9ae57d01/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 984.943757] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989c71fc-e786-4878-9a8d-05544eeb6b90 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 984.951932] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3acbcef-b194-44a7-a11c-13003a833bc6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 984.986743] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd84d003-3f6a-4768-954d-cc727a4022d3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 984.994808] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38491959-525d-4270-84f8-66b93b9d771c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 985.009253] env[61962]: DEBUG nova.compute.provider_tree [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 985.017873] env[61962]: DEBUG nova.scheduler.client.report [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 985.033541] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.588s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 985.034074] env[61962]: ERROR nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 985.034074] env[61962]: Faults: ['InvalidArgument']
[ 985.034074] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Traceback (most recent call last):
[ 985.034074] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 985.034074] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] self.driver.spawn(context, instance, image_meta,
[ 985.034074] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 985.034074] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 985.034074] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 985.034074] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] self._fetch_image_if_missing(context, vi)
[ 985.034074] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 985.034074] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] image_cache(vi, tmp_image_ds_loc)
[ 985.034074] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] vm_util.copy_virtual_disk(
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] session._wait_for_task(vmdk_copy_task)
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] return self.wait_for_task(task_ref)
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] return evt.wait()
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] result = hub.switch()
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] return self.greenlet.switch()
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 985.034399] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] self.f(*self.args, **self.kw)
[ 985.034674] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 985.034674] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] raise exceptions.translate_fault(task_info.error)
[ 985.034674] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 985.034674] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Faults: ['InvalidArgument']
[ 985.034674] env[61962]: ERROR nova.compute.manager [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188]
[ 985.034788] env[61962]: DEBUG nova.compute.utils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 985.036426] env[61962]: DEBUG nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Build of instance a6e6f30d-c667-4f70-bf60-596a8fe97188 was re-scheduled: A specified parameter was not correct: fileType
[ 985.036426] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 985.036811] env[61962]: DEBUG nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 985.036984] env[61962]: DEBUG nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 985.037221] env[61962]: DEBUG nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 985.037393] env[61962]: DEBUG nova.network.neutron [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 985.435046] env[61962]: DEBUG nova.network.neutron [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 985.451350] env[61962]: INFO nova.compute.manager [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Took 0.41 seconds to deallocate network for instance.
[ 985.588170] env[61962]: INFO nova.scheduler.client.report [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Deleted allocations for instance a6e6f30d-c667-4f70-bf60-596a8fe97188
[ 985.610979] env[61962]: DEBUG oslo_concurrency.lockutils [None req-c0cd2188-095e-4a7d-a70d-63e0d919c4c3 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock "a6e6f30d-c667-4f70-bf60-596a8fe97188" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 424.450s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 985.612617] env[61962]: DEBUG oslo_concurrency.lockutils [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock "a6e6f30d-c667-4f70-bf60-596a8fe97188" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 227.174s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 985.612825] env[61962]: DEBUG oslo_concurrency.lockutils [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Acquiring lock "a6e6f30d-c667-4f70-bf60-596a8fe97188-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 985.613035] env[61962]: DEBUG oslo_concurrency.lockutils [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock "a6e6f30d-c667-4f70-bf60-596a8fe97188-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
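The traceback above is the key failure in this section: CopyVirtualDisk_Task fails server-side while caching the sparse image, _poll_task converts the task error through exceptions.translate_fault, wait_for_task re-raises it as VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), and the compute manager then aborts the resource claim, deallocates the (empty) network info, deletes the placement allocations, and re-schedules the build. A caller that wants to react to this specific fault can inspect fault_list; a minimal sketch, with handle_invalid_copy_spec as a hypothetical handler:

    from oslo_vmware import exceptions as vexc

    try:
        session.wait_for_task(vmdk_copy_task)
    except vexc.VimFaultException as err:
        # fault_list carries the vCenter fault names, e.g. ['InvalidArgument']
        # as in the traceback above; str(err) holds the localized message.
        if 'InvalidArgument' in (err.fault_list or []):
            handle_invalid_copy_spec(err)  # hypothetical recovery path
        raise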
[ 985.613196] env[61962]: DEBUG oslo_concurrency.lockutils [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock "a6e6f30d-c667-4f70-bf60-596a8fe97188-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 985.615649] env[61962]: INFO nova.compute.manager [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Terminating instance
[ 985.617379] env[61962]: DEBUG nova.compute.manager [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 985.617571] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 985.618039] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51f9f47c-44e7-4c13-bcc5-1151795e37ac {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 985.629095] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88718f8e-d7c1-4768-a6de-4a70e9d5e2c7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 985.639737] env[61962]: DEBUG nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 985.663933] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a6e6f30d-c667-4f70-bf60-596a8fe97188 could not be found.
[ 985.664156] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 985.664344] env[61962]: INFO nova.compute.manager [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 985.664580] env[61962]: DEBUG oslo.service.loopingcall [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 985.664804] env[61962]: DEBUG nova.compute.manager [-] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 985.664898] env[61962]: DEBUG nova.network.neutron [-] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 985.689305] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 985.689590] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 985.691067] env[61962]: INFO nova.compute.claims [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 985.701538] env[61962]: DEBUG nova.network.neutron [-] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 985.711407] env[61962]: INFO nova.compute.manager [-] [instance: a6e6f30d-c667-4f70-bf60-596a8fe97188] Took 0.05 seconds to deallocate network for instance.
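The "Acquiring lock / acquired ... waited / released ... held" triplets around "compute_resources" come from oslo.concurrency's lockutils, which the resource tracker uses to serialize instance_claim and abort_instance_claim. A minimal sketch of the decorator that emits this DEBUG logging (the function body is illustrative):

    from oslo_concurrency import lockutils

    # Each call logs "Acquiring lock ...", "... acquired ... :: waited Ns"
    # and "... 'released' ... :: held Ns", as seen throughout this log.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance):
        pass  # mutate tracker state while holding the semaphore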
[ 985.799120] env[61962]: DEBUG oslo_concurrency.lockutils [None req-282d8ecb-75d0-4933-a909-a3c369574057 tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock "a6e6f30d-c667-4f70-bf60-596a8fe97188" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.186s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 986.130807] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5393c9e0-d9e2-4149-a854-14f7fc256f8c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 986.139942] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98682932-47ea-46d0-bcab-3f23664131bf {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 986.169851] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 986.170106] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}}
[ 986.170176] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}}
[ 986.172264] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195b9cdd-bd4c-4889-9fd3-409fe07628d1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 986.180886] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e471c0b-4038-4e7f-bd18-2d1c1fd61a26 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 986.195039] env[61962]: DEBUG nova.compute.provider_tree [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 986.198139] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 986.198294] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 986.198426] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 986.198552] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 986.198676] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 986.198800] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 986.198919] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 986.199047] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 986.199169] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 986.199284] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 986.199406] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}}
[ 986.200038] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 986.202680] env[61962]: DEBUG nova.scheduler.client.report [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 986.220971] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.531s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 986.221478] env[61962]: DEBUG nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 986.254928] env[61962]: DEBUG nova.compute.utils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 986.256489] env[61962]: DEBUG nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 986.256665] env[61962]: DEBUG nova.network.neutron [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 986.265740] env[61962]: DEBUG nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 986.313008] env[61962]: DEBUG nova.policy [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5bbb37dd64b648eeb31e755acbe813a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3285f5bbd10d495e852d0ef0c0047847', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}}
[ 986.334290] env[61962]: DEBUG nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 986.363928] env[61962]: DEBUG nova.virt.hardware [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 986.363928] env[61962]: DEBUG nova.virt.hardware [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 986.363928] env[61962]: DEBUG nova.virt.hardware [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 986.364197] env[61962]: DEBUG nova.virt.hardware [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 986.364197] env[61962]: DEBUG nova.virt.hardware [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 986.364197] env[61962]: DEBUG nova.virt.hardware [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 986.364197] env[61962]: DEBUG nova.virt.hardware [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 986.364197] env[61962]: DEBUG nova.virt.hardware [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 986.364395] env[61962]: DEBUG nova.virt.hardware [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 986.364395] env[61962]: DEBUG nova.virt.hardware [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 986.364395] env[61962]: DEBUG nova.virt.hardware [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 986.365068] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0faf103e-1f82-468d-89b9-48b6e6142593 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 986.373600] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4df51d7-64f7-4f83-b0f0-1bd51f6ed93c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 986.766570] env[61962]: DEBUG nova.network.neutron [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Successfully created port: 3cd995e0-d535-4b6e-b89a-ad891cbf268e {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 987.158573] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 987.791480] env[61962]: DEBUG nova.network.neutron [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Successfully updated port: 3cd995e0-d535-4b6e-b89a-ad891cbf268e {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 987.791798] env[61962]: DEBUG nova.compute.manager [req-552e9a8d-ef27-4392-ab21-142491a23198 req-37861017-3057-4838-98b9-886d6d338800 service nova] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Received event network-vif-plugged-3cd995e0-d535-4b6e-b89a-ad891cbf268e {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 987.791798] env[61962]: DEBUG oslo_concurrency.lockutils [req-552e9a8d-ef27-4392-ab21-142491a23198 req-37861017-3057-4838-98b9-886d6d338800 service nova] Acquiring lock "e76b5908-3bad-4c19-90e7-b15ca253ad80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 987.792089] env[61962]: DEBUG oslo_concurrency.lockutils [req-552e9a8d-ef27-4392-ab21-142491a23198 req-37861017-3057-4838-98b9-886d6d338800 service nova] Lock "e76b5908-3bad-4c19-90e7-b15ca253ad80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 987.792172] env[61962]: DEBUG oslo_concurrency.lockutils [req-552e9a8d-ef27-4392-ab21-142491a23198 req-37861017-3057-4838-98b9-886d6d338800 service nova] Lock "e76b5908-3bad-4c19-90e7-b15ca253ad80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 987.792317] env[61962]: DEBUG nova.compute.manager [req-552e9a8d-ef27-4392-ab21-142491a23198 req-37861017-3057-4838-98b9-886d6d338800 service nova] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] No waiting events found dispatching network-vif-plugged-3cd995e0-d535-4b6e-b89a-ad891cbf268e {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 987.792473] env[61962]: WARNING nova.compute.manager [req-552e9a8d-ef27-4392-ab21-142491a23198 req-37861017-3057-4838-98b9-886d6d338800 service nova] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Received unexpected event network-vif-plugged-3cd995e0-d535-4b6e-b89a-ad891cbf268e for instance with vm_state building and task_state spawning.
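Interleaved with the build are ComputeManager periodic tasks (_check_instance_build_time, _heal_instance_info_cache, _poll_volume_usage, and so on), all driven by oslo.service's periodic_task framework. A minimal sketch of how such tasks are declared and run, with the class name and spacing as assumptions:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class DemoManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)  # seconds; illustrative value
        def _heal_instance_info_cache(self, context):
            # Each tick logs "Running periodic task ..." before invoking this.
            pass

    # A service ticker then drives them via:
    #   manager.run_periodic_tasks(context, raise_on_error=False)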
[ 987.807922] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquiring lock "refresh_cache-e76b5908-3bad-4c19-90e7-b15ca253ad80" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 987.808095] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquired lock "refresh_cache-e76b5908-3bad-4c19-90e7-b15ca253ad80" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 987.808286] env[61962]: DEBUG nova.network.neutron [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 987.873467] env[61962]: DEBUG nova.network.neutron [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 988.086297] env[61962]: DEBUG nova.network.neutron [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Updating instance_info_cache with network_info: [{"id": "3cd995e0-d535-4b6e-b89a-ad891cbf268e", "address": "fa:16:3e:e6:96:d1", "network": {"id": "68f541cc-0785-48ff-b4de-116eb94f29f2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1920871856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3285f5bbd10d495e852d0ef0c0047847", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cd995e0-d5", "ovs_interfaceid": "3cd995e0-d535-4b6e-b89a-ad891cbf268e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 988.102752] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Releasing lock "refresh_cache-e76b5908-3bad-4c19-90e7-b15ca253ad80" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 988.103078] env[61962]: DEBUG nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Instance network_info: |[{"id": "3cd995e0-d535-4b6e-b89a-ad891cbf268e", "address": "fa:16:3e:e6:96:d1", "network": {"id": "68f541cc-0785-48ff-b4de-116eb94f29f2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1920871856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3285f5bbd10d495e852d0ef0c0047847", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cd995e0-d5", "ovs_interfaceid": "3cd995e0-d535-4b6e-b89a-ad891cbf268e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 988.103507] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:96:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3cd995e0-d535-4b6e-b89a-ad891cbf268e', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 988.113058] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Creating folder: Project (3285f5bbd10d495e852d0ef0c0047847). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 988.113694] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87f18595-1318-4f89-8168-2989df048b06 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 988.125405] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Created folder: Project (3285f5bbd10d495e852d0ef0c0047847) in parent group-v953327.
[ 988.125597] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Creating folder: Instances. Parent ref: group-v953384. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 988.125842] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35932bf6-9b05-47a0-a889-07a3638b6e4a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 988.134218] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Created folder: Instances in parent group-v953384.
[ 988.134453] env[61962]: DEBUG oslo.service.loopingcall [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 988.134638] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 988.134847] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7eef492-57ac-447c-b5c0-862f5a9e57dd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 988.154092] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 988.154092] env[61962]: value = "task-4891991"
[ 988.154092] env[61962]: _type = "Task"
[ 988.154092] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 988.158801] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 988.159078] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 988.162439] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891991, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 988.664730] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891991, 'name': CreateVM_Task, 'duration_secs': 0.323421} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 988.664908] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 988.665665] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 988.665829] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 988.666169] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 988.666431] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a4a81db-6753-482f-b3ad-c49c1a2f1e9e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 988.672177] env[61962]: DEBUG oslo_vmware.api [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Waiting for the task: (returnval){
[ 988.672177] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]522c0619-5fc0-8b35-8503-562c07a01ad2"
[ 988.672177] env[61962]: _type = "Task"
[ 988.672177] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 988.681550] env[61962]: DEBUG oslo_vmware.api [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]522c0619-5fc0-8b35-8503-562c07a01ad2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 989.183337] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 989.183655] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 989.183791] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 989.879469] env[61962]: DEBUG nova.compute.manager [req-a151afeb-3763-4ffc-b858-b5455dc634e9 req-3292f506-a78f-4871-b592-85c6ba337083 service nova] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Received event network-changed-3cd995e0-d535-4b6e-b89a-ad891cbf268e {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 989.879711] env[61962]: DEBUG nova.compute.manager [req-a151afeb-3763-4ffc-b858-b5455dc634e9 req-3292f506-a78f-4871-b592-85c6ba337083 service nova] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Refreshing instance network info cache due to event network-changed-3cd995e0-d535-4b6e-b89a-ad891cbf268e. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}}
[ 989.879947] env[61962]: DEBUG oslo_concurrency.lockutils [req-a151afeb-3763-4ffc-b858-b5455dc634e9 req-3292f506-a78f-4871-b592-85c6ba337083 service nova] Acquiring lock "refresh_cache-e76b5908-3bad-4c19-90e7-b15ca253ad80" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 989.880384] env[61962]: DEBUG oslo_concurrency.lockutils [req-a151afeb-3763-4ffc-b858-b5455dc634e9 req-3292f506-a78f-4871-b592-85c6ba337083 service nova] Acquired lock "refresh_cache-e76b5908-3bad-4c19-90e7-b15ca253ad80" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 989.880683] env[61962]: DEBUG nova.network.neutron [req-a151afeb-3763-4ffc-b858-b5455dc634e9 req-3292f506-a78f-4871-b592-85c6ba337083 service nova] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Refreshing network info cache for port 3cd995e0-d535-4b6e-b89a-ad891cbf268e {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 990.255104] env[61962]: DEBUG nova.network.neutron [req-a151afeb-3763-4ffc-b858-b5455dc634e9 req-3292f506-a78f-4871-b592-85c6ba337083 service nova] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Updated VIF entry in instance network info cache for port 3cd995e0-d535-4b6e-b89a-ad891cbf268e. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 990.255434] env[61962]: DEBUG nova.network.neutron [req-a151afeb-3763-4ffc-b858-b5455dc634e9 req-3292f506-a78f-4871-b592-85c6ba337083 service nova] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Updating instance_info_cache with network_info: [{"id": "3cd995e0-d535-4b6e-b89a-ad891cbf268e", "address": "fa:16:3e:e6:96:d1", "network": {"id": "68f541cc-0785-48ff-b4de-116eb94f29f2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1920871856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3285f5bbd10d495e852d0ef0c0047847", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cd995e0-d5", "ovs_interfaceid": "3cd995e0-d535-4b6e-b89a-ad891cbf268e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 990.265268] env[61962]: DEBUG oslo_concurrency.lockutils [req-a151afeb-3763-4ffc-b858-b5455dc634e9 req-3292f506-a78f-4871-b592-85c6ba337083 service nova] Releasing lock "refresh_cache-e76b5908-3bad-4c19-90e7-b15ca253ad80" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 994.537495] env[61962]: DEBUG oslo_concurrency.lockutils [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "454eebcc-ea18-4046-ba2e-01de1c8a954a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 995.400864] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquiring lock "e76b5908-3bad-4c19-90e7-b15ca253ad80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 999.390443] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquiring lock "d970d822-12ab-43cc-8d34-abc6eff8a68d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 999.390801] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Lock "d970d822-12ab-43cc-8d34-abc6eff8a68d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1010.792869] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a1dcb04c-7139-44cd-9826-36b1b040ba70 tempest-SecurityGroupsTestJSON-26720826 tempest-SecurityGroupsTestJSON-26720826-project-member] Acquiring lock "233cd3d2-df58-4826-9bb1-a93167d4cfa4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1010.793169] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a1dcb04c-7139-44cd-9826-36b1b040ba70 tempest-SecurityGroupsTestJSON-26720826 tempest-SecurityGroupsTestJSON-26720826-project-member] Lock "233cd3d2-df58-4826-9bb1-a93167d4cfa4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1012.221699] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0c587f3b-20e0-453c-8616-e07485ee1c45 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquiring lock "56d0be20-ee15-4c11-afa6-1630ffed7bc4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1012.222066] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0c587f3b-20e0-453c-8616-e07485ee1c45 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "56d0be20-ee15-4c11-afa6-1630ffed7bc4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1024.860913] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9973fa98-c8f3-49d0-a7b2-f13de3c68c98 tempest-ServerActionsTestOtherB-2118315702 tempest-ServerActionsTestOtherB-2118315702-project-member] Acquiring lock "7dc97bc2-eaf6-4df0-bd77-d4ee263f3a48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1024.861573] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9973fa98-c8f3-49d0-a7b2-f13de3c68c98 tempest-ServerActionsTestOtherB-2118315702 tempest-ServerActionsTestOtherB-2118315702-project-member] Lock "7dc97bc2-eaf6-4df0-bd77-d4ee263f3a48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1028.192471] env[61962]: DEBUG oslo_concurrency.lockutils [None req-dfadb1ab-d2d4-4747-a921-c6338719a239 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Acquiring lock "b3143d91-7fe7-45cf-9b90-c84d989394a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1028.192471]
env[61962]: DEBUG oslo_concurrency.lockutils [None req-dfadb1ab-d2d4-4747-a921-c6338719a239 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Lock "b3143d91-7fe7-45cf-9b90-c84d989394a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.627144] env[61962]: WARNING oslo_vmware.rw_handles [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1031.627144] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1031.627144] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1031.627144] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1031.627144] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1031.627144] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1031.627144] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1031.627144] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1031.627144] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1031.627144] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1031.627144] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1031.627144] env[61962]: ERROR oslo_vmware.rw_handles [ 1031.627144] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/31fb1330-0f08-4bb2-8752-fb0e9ae57d01/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1031.629392] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1031.629642] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Copying Virtual Disk [datastore2] vmware_temp/31fb1330-0f08-4bb2-8752-fb0e9ae57d01/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/31fb1330-0f08-4bb2-8752-fb0e9ae57d01/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1031.629924] env[61962]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37f27aa8-14a3-4ec8-84ce-739ed5b04955 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.638533] env[61962]: DEBUG oslo_vmware.api [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Waiting for the task: (returnval){ [ 1031.638533] env[61962]: value = "task-4891992" [ 1031.638533] env[61962]: _type = "Task" [ 1031.638533] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.648299] env[61962]: DEBUG oslo_vmware.api [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Task: {'id': task-4891992, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.152167] env[61962]: DEBUG oslo_vmware.exceptions [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1032.152548] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.153167] env[61962]: ERROR nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1032.153167] env[61962]: Faults: ['InvalidArgument'] [ 1032.153167] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Traceback (most recent call last): [ 1032.153167] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1032.153167] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] yield resources [ 1032.153167] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1032.153167] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] self.driver.spawn(context, instance, image_meta, [ 1032.153167] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1032.153167] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1032.153167] env[61962]: ERROR nova.compute.manager [instance: 
667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1032.153167] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] self._fetch_image_if_missing(context, vi) [ 1032.153167] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] image_cache(vi, tmp_image_ds_loc) [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] vm_util.copy_virtual_disk( [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] session._wait_for_task(vmdk_copy_task) [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] return self.wait_for_task(task_ref) [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] return evt.wait() [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] result = hub.switch() [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1032.153526] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] return self.greenlet.switch() [ 1032.153937] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1032.153937] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] self.f(*self.args, **self.kw) [ 1032.153937] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1032.153937] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] raise exceptions.translate_fault(task_info.error) [ 1032.153937] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1032.153937] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Faults: 
['InvalidArgument'] [ 1032.153937] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] [ 1032.153937] env[61962]: INFO nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Terminating instance [ 1032.155832] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.155832] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1032.155832] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d538168a-cf46-4a72-997f-c1e0dc6a08cd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.159139] env[61962]: DEBUG nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1032.159573] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1032.160559] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bcdf8f6-4a7a-4c39-a70b-c4f5a99b1638 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.164944] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1032.165135] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1032.166153] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10843346-e251-4412-af55-14f686516b06 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.170695] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1032.171259] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14d49d72-57b0-4276-9b61-4b599174e9e8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.174023] env[61962]: DEBUG oslo_vmware.api [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Waiting for the task: (returnval){ [ 1032.174023] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5229a6ad-f098-f920-3c10-ab01aaffe0b9" [ 1032.174023] env[61962]: _type = "Task" [ 1032.174023] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.188525] env[61962]: DEBUG oslo_vmware.api [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5229a6ad-f098-f920-3c10-ab01aaffe0b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.243534] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1032.243764] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1032.243955] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Deleting the datastore file [datastore2] 667f56c0-896f-4e25-9e8a-3954f7733dc0 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1032.244362] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-434c77d2-c781-4923-a29b-6d0ad314c76b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.252854] env[61962]: DEBUG oslo_vmware.api [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Waiting for the task: (returnval){ [ 1032.252854] env[61962]: value = "task-4891994" [ 1032.252854] env[61962]: _type = "Task" [ 1032.252854] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.262884] env[61962]: DEBUG oslo_vmware.api [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Task: {'id': task-4891994, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.687057] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1032.687057] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Creating directory with path [datastore2] vmware_temp/5fe03819-a358-4795-888b-e42be739abf5/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1032.687057] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f445e0b3-6638-4bb8-ac59-8a1bc859ff7f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.700765] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Created directory with path [datastore2] vmware_temp/5fe03819-a358-4795-888b-e42be739abf5/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1032.700985] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Fetch image to [datastore2] vmware_temp/5fe03819-a358-4795-888b-e42be739abf5/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1032.701171] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/5fe03819-a358-4795-888b-e42be739abf5/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1032.703940] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc08b68-0e84-45d8-9c2d-e0784ba4f585 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.712551] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1984cda7-14a9-4f2f-8ecd-925157fd82d2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.724465] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e314a37-24cf-4b91-9aba-1e6795afd352 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.765698] env[61962]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a22c7bd-247f-4e06-9af2-b238be8e970e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.776857] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c6242a7b-3aa2-4213-b820-4972138dbc7b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.778056] env[61962]: DEBUG oslo_vmware.api [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Task: {'id': task-4891994, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081416} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.778161] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1032.778287] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1032.778471] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1032.778643] env[61962]: INFO nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Took 0.62 seconds to destroy the instance on the hypervisor. 
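The wait_for_task / _poll_task entries above (task-4891992, task-4891994, and the SearchDatastore_Task session tasks) all follow the same poll-until-terminal pattern: the caller re-reads the task's state at a fixed interval, logs the reported progress, and raises a translated fault if the task ends in error, which is how CopyVirtualDisk_Task surfaced earlier as VimFaultException ("A specified parameter was not correct: fileType"). What follows is only a minimal illustrative sketch of that loop; read_task_info, poll_interval and TaskFailed are hypothetical stand-ins, not the oslo.vmware API.

import logging
import time

LOG = logging.getLogger(__name__)


class TaskFailed(Exception):
    """Hypothetical stand-in for the translated fault (e.g. VimFaultException)."""


def wait_for_task(read_task_info, task_ref, poll_interval=0.5):
    # read_task_info(task_ref) is assumed to return an object with
    # .state ('running' | 'success' | 'error'), .progress and .error,
    # mirroring what the "... progress is 0%" entries above report.
    while True:
        info = read_task_info(task_ref)
        LOG.debug("Task %s: %s, progress %s%%",
                  task_ref, info.state, info.progress)
        if info.state == "success":
            return info
        if info.state == "error":
            # In the log this is where the error is translated and raised,
            # e.g. VimFaultException with Faults: ['InvalidArgument'].
            raise TaskFailed(info.error)
        time.sleep(poll_interval)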
[ 1032.781265] env[61962]: DEBUG nova.compute.claims [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1032.781265] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.781406] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.803349] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1032.881998] env[61962]: DEBUG oslo_vmware.rw_handles [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5fe03819-a358-4795-888b-e42be739abf5/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1032.946584] env[61962]: DEBUG oslo_vmware.rw_handles [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1032.947024] env[61962]: DEBUG oslo_vmware.rw_handles [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5fe03819-a358-4795-888b-e42be739abf5/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1033.238544] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20c760b-8bf8-4c0b-99b3-7a1de86064f1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.246948] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368e2840-6bd4-4d3c-8936-0f8c69376d98 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.276850] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89c91ff-d3c5-4a3a-953f-a505de8a1c8d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.285165] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928a36ea-6645-48bd-b94d-a61724676a04 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.300421] env[61962]: DEBUG nova.compute.provider_tree [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.311151] env[61962]: DEBUG nova.scheduler.client.report [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1033.339452] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.558s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.339990] env[61962]: ERROR nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1033.339990] env[61962]: Faults: ['InvalidArgument'] [ 1033.339990] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Traceback (most recent call last): [ 1033.339990] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1033.339990] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] self.driver.spawn(context, instance, image_meta, [ 1033.339990] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1033.339990] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1033.339990] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1033.339990] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] self._fetch_image_if_missing(context, vi) [ 1033.339990] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1033.339990] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] image_cache(vi, tmp_image_ds_loc) [ 1033.339990] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] vm_util.copy_virtual_disk( [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] session._wait_for_task(vmdk_copy_task) [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] return self.wait_for_task(task_ref) [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] return evt.wait() [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] result = hub.switch() [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] return self.greenlet.switch() [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1033.340533] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] self.f(*self.args, **self.kw) [ 1033.341312] env[61962]: ERROR nova.compute.manager [instance: 
667f56c0-896f-4e25-9e8a-3954f7733dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1033.341312] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] raise exceptions.translate_fault(task_info.error) [ 1033.341312] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1033.341312] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Faults: ['InvalidArgument'] [ 1033.341312] env[61962]: ERROR nova.compute.manager [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] [ 1033.341312] env[61962]: DEBUG nova.compute.utils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1033.342281] env[61962]: DEBUG nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Build of instance 667f56c0-896f-4e25-9e8a-3954f7733dc0 was re-scheduled: A specified parameter was not correct: fileType [ 1033.342281] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1033.342662] env[61962]: DEBUG nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1033.342847] env[61962]: DEBUG nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1033.343034] env[61962]: DEBUG nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1033.343218] env[61962]: DEBUG nova.network.neutron [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1033.975916] env[61962]: DEBUG nova.network.neutron [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.000272] env[61962]: INFO nova.compute.manager [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Took 0.66 seconds to deallocate network for instance. [ 1034.136020] env[61962]: INFO nova.scheduler.client.report [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Deleted allocations for instance 667f56c0-896f-4e25-9e8a-3954f7733dc0 [ 1034.167198] env[61962]: DEBUG oslo_concurrency.lockutils [None req-719fe7db-a9f6-4ffa-bab2-00a4fa9c93c0 tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Lock "667f56c0-896f-4e25-9e8a-3954f7733dc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 459.675s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.167198] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Lock "667f56c0-896f-4e25-9e8a-3954f7733dc0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 262.465s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.167198] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Acquiring lock "667f56c0-896f-4e25-9e8a-3954f7733dc0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.167385] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Lock 
"667f56c0-896f-4e25-9e8a-3954f7733dc0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.167385] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Lock "667f56c0-896f-4e25-9e8a-3954f7733dc0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.169662] env[61962]: INFO nova.compute.manager [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Terminating instance [ 1034.173625] env[61962]: DEBUG nova.compute.manager [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1034.173979] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1034.174372] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-861b2748-4cd4-4990-9594-1bd252fc55a9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.184474] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff14e1c-d129-4591-a984-5a41b4fd18da {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.197963] env[61962]: DEBUG nova.compute.manager [None req-0175fd8f-fe26-4651-8a2d-2c4f726c7c6a tempest-ListImageFiltersTestJSON-1372204851 tempest-ListImageFiltersTestJSON-1372204851-project-member] [instance: c17b6733-edea-4db9-b937-c38585eb92bf] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1034.224903] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 667f56c0-896f-4e25-9e8a-3954f7733dc0 could not be found. 
[ 1034.227064] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1034.227064] env[61962]: INFO nova.compute.manager [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1034.227064] env[61962]: DEBUG oslo.service.loopingcall [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.227064] env[61962]: DEBUG nova.compute.manager [-] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1034.227064] env[61962]: DEBUG nova.network.neutron [-] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1034.244029] env[61962]: DEBUG nova.compute.manager [None req-0175fd8f-fe26-4651-8a2d-2c4f726c7c6a tempest-ListImageFiltersTestJSON-1372204851 tempest-ListImageFiltersTestJSON-1372204851-project-member] [instance: c17b6733-edea-4db9-b937-c38585eb92bf] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1034.276572] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0175fd8f-fe26-4651-8a2d-2c4f726c7c6a tempest-ListImageFiltersTestJSON-1372204851 tempest-ListImageFiltersTestJSON-1372204851-project-member] Lock "c17b6733-edea-4db9-b937-c38585eb92bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.070s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.278772] env[61962]: DEBUG nova.network.neutron [-] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.293036] env[61962]: DEBUG nova.compute.manager [None req-f7b0ed63-6aaa-45c6-9e72-e7a859bd8ce6 tempest-AttachInterfacesUnderV243Test-760923567 tempest-AttachInterfacesUnderV243Test-760923567-project-member] [instance: 1228fef1-3837-4a1b-8c88-fc1e27af6b4f] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1034.298520] env[61962]: INFO nova.compute.manager [-] [instance: 667f56c0-896f-4e25-9e8a-3954f7733dc0] Took 0.07 seconds to deallocate network for instance. [ 1034.325628] env[61962]: DEBUG nova.compute.manager [None req-f7b0ed63-6aaa-45c6-9e72-e7a859bd8ce6 tempest-AttachInterfacesUnderV243Test-760923567 tempest-AttachInterfacesUnderV243Test-760923567-project-member] [instance: 1228fef1-3837-4a1b-8c88-fc1e27af6b4f] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1034.349991] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f7b0ed63-6aaa-45c6-9e72-e7a859bd8ce6 tempest-AttachInterfacesUnderV243Test-760923567 tempest-AttachInterfacesUnderV243Test-760923567-project-member] Lock "1228fef1-3837-4a1b-8c88-fc1e27af6b4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.051s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.367481] env[61962]: DEBUG nova.compute.manager [None req-26263581-648d-4e26-9469-10ed46e2edfa tempest-ListImageFiltersTestJSON-1372204851 tempest-ListImageFiltersTestJSON-1372204851-project-member] [instance: 66eb1344-b692-4f1c-a5ac-b3518e350741] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1034.397823] env[61962]: DEBUG nova.compute.manager [None req-26263581-648d-4e26-9469-10ed46e2edfa tempest-ListImageFiltersTestJSON-1372204851 tempest-ListImageFiltersTestJSON-1372204851-project-member] [instance: 66eb1344-b692-4f1c-a5ac-b3518e350741] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1034.427224] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0cd9e5a1-1981-46d4-a08b-3dfe0db9f6ee tempest-FloatingIPsAssociationTestJSON-152212675 tempest-FloatingIPsAssociationTestJSON-152212675-project-member] Lock "667f56c0-896f-4e25-9e8a-3954f7733dc0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.261s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.430899] env[61962]: DEBUG oslo_concurrency.lockutils [None req-26263581-648d-4e26-9469-10ed46e2edfa tempest-ListImageFiltersTestJSON-1372204851 tempest-ListImageFiltersTestJSON-1372204851-project-member] Lock "66eb1344-b692-4f1c-a5ac-b3518e350741" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.003s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.444020] env[61962]: DEBUG nova.compute.manager [None req-a2e8f290-e090-4273-9cff-81aa5b83e324 tempest-ServerShowV257Test-37650740 tempest-ServerShowV257Test-37650740-project-member] [instance: b33ecac7-cb2f-438c-9ee4-7cd53388e318] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1034.468827] env[61962]: DEBUG nova.compute.manager [None req-a2e8f290-e090-4273-9cff-81aa5b83e324 tempest-ServerShowV257Test-37650740 tempest-ServerShowV257Test-37650740-project-member] [instance: b33ecac7-cb2f-438c-9ee4-7cd53388e318] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1034.494163] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a2e8f290-e090-4273-9cff-81aa5b83e324 tempest-ServerShowV257Test-37650740 tempest-ServerShowV257Test-37650740-project-member] Lock "b33ecac7-cb2f-438c-9ee4-7cd53388e318" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.859s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.534179] env[61962]: DEBUG nova.compute.manager [None req-253842ad-c195-4a95-92ed-04dcd3f5a28e tempest-SecurityGroupsTestJSON-26720826 tempest-SecurityGroupsTestJSON-26720826-project-member] [instance: 252f4add-476d-46bc-b369-c8d66a512142] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1034.566686] env[61962]: DEBUG nova.compute.manager [None req-253842ad-c195-4a95-92ed-04dcd3f5a28e tempest-SecurityGroupsTestJSON-26720826 tempest-SecurityGroupsTestJSON-26720826-project-member] [instance: 252f4add-476d-46bc-b369-c8d66a512142] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1034.627897] env[61962]: DEBUG oslo_concurrency.lockutils [None req-253842ad-c195-4a95-92ed-04dcd3f5a28e tempest-SecurityGroupsTestJSON-26720826 tempest-SecurityGroupsTestJSON-26720826-project-member] Lock "252f4add-476d-46bc-b369-c8d66a512142" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.177s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.644911] env[61962]: DEBUG nova.compute.manager [None req-286ae033-ae0a-4fe3-af73-4278631f960b tempest-ServerMetadataNegativeTestJSON-1558107376 tempest-ServerMetadataNegativeTestJSON-1558107376-project-member] [instance: 647d141e-1aaf-4e5e-b0a3-31c9bee0adf7] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1034.671845] env[61962]: DEBUG nova.compute.manager [None req-286ae033-ae0a-4fe3-af73-4278631f960b tempest-ServerMetadataNegativeTestJSON-1558107376 tempest-ServerMetadataNegativeTestJSON-1558107376-project-member] [instance: 647d141e-1aaf-4e5e-b0a3-31c9bee0adf7] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1034.700603] env[61962]: DEBUG oslo_concurrency.lockutils [None req-286ae033-ae0a-4fe3-af73-4278631f960b tempest-ServerMetadataNegativeTestJSON-1558107376 tempest-ServerMetadataNegativeTestJSON-1558107376-project-member] Lock "647d141e-1aaf-4e5e-b0a3-31c9bee0adf7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.708s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.712378] env[61962]: DEBUG nova.compute.manager [None req-0d94c3c7-4f36-4ce6-a743-2e7b40d62c83 tempest-ServersAaction247Test-1090469211 tempest-ServersAaction247Test-1090469211-project-member] [instance: fb33811b-242a-4cce-ae11-c004e507e7de] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1034.740927] env[61962]: DEBUG nova.compute.manager [None req-0d94c3c7-4f36-4ce6-a743-2e7b40d62c83 tempest-ServersAaction247Test-1090469211 tempest-ServersAaction247Test-1090469211-project-member] [instance: fb33811b-242a-4cce-ae11-c004e507e7de] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1034.773614] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0d94c3c7-4f36-4ce6-a743-2e7b40d62c83 tempest-ServersAaction247Test-1090469211 tempest-ServersAaction247Test-1090469211-project-member] Lock "fb33811b-242a-4cce-ae11-c004e507e7de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.913s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.789718] env[61962]: DEBUG nova.compute.manager [None req-a0c5d3ad-fa00-4fd1-9a99-033a2c9ba886 tempest-ServerAddressesNegativeTestJSON-1340443941 tempest-ServerAddressesNegativeTestJSON-1340443941-project-member] [instance: c5eca2f9-f569-4510-882b-217c2c28ec69] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1034.816988] env[61962]: DEBUG nova.compute.manager [None req-a0c5d3ad-fa00-4fd1-9a99-033a2c9ba886 tempest-ServerAddressesNegativeTestJSON-1340443941 tempest-ServerAddressesNegativeTestJSON-1340443941-project-member] [instance: c5eca2f9-f569-4510-882b-217c2c28ec69] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1034.844189] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a0c5d3ad-fa00-4fd1-9a99-033a2c9ba886 tempest-ServerAddressesNegativeTestJSON-1340443941 tempest-ServerAddressesNegativeTestJSON-1340443941-project-member] Lock "c5eca2f9-f569-4510-882b-217c2c28ec69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.076s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.863275] env[61962]: DEBUG nova.compute.manager [None req-437f1d6e-ae3f-4ed0-b52a-453235395d7b tempest-ServersTestMultiNic-318652074 tempest-ServersTestMultiNic-318652074-project-member] [instance: 684d7213-d30a-4881-8e13-9817d6765074] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1034.888881] env[61962]: DEBUG nova.compute.manager [None req-437f1d6e-ae3f-4ed0-b52a-453235395d7b tempest-ServersTestMultiNic-318652074 tempest-ServersTestMultiNic-318652074-project-member] [instance: 684d7213-d30a-4881-8e13-9817d6765074] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1034.914346] env[61962]: DEBUG oslo_concurrency.lockutils [None req-437f1d6e-ae3f-4ed0-b52a-453235395d7b tempest-ServersTestMultiNic-318652074 tempest-ServersTestMultiNic-318652074-project-member] Lock "684d7213-d30a-4881-8e13-9817d6765074" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.904s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.928549] env[61962]: DEBUG nova.compute.manager [None req-74d29d12-cd52-44c6-8ebf-65c67fcd6be5 tempest-AttachVolumeTestJSON-1002857752 tempest-AttachVolumeTestJSON-1002857752-project-member] [instance: 9cf159b3-53c8-4002-9e9b-5c0f82e1e555] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1034.966222] env[61962]: DEBUG nova.compute.manager [None req-74d29d12-cd52-44c6-8ebf-65c67fcd6be5 tempest-AttachVolumeTestJSON-1002857752 tempest-AttachVolumeTestJSON-1002857752-project-member] [instance: 9cf159b3-53c8-4002-9e9b-5c0f82e1e555] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1034.998606] env[61962]: DEBUG oslo_concurrency.lockutils [None req-74d29d12-cd52-44c6-8ebf-65c67fcd6be5 tempest-AttachVolumeTestJSON-1002857752 tempest-AttachVolumeTestJSON-1002857752-project-member] Lock "9cf159b3-53c8-4002-9e9b-5c0f82e1e555" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.060s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.009169] env[61962]: DEBUG nova.compute.manager [None req-26c84f68-530e-4672-82ee-12b17ec1b4aa tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] [instance: 306d093c-40fa-4863-b725-b3fc65042dce] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1035.036972] env[61962]: DEBUG nova.compute.manager [None req-26c84f68-530e-4672-82ee-12b17ec1b4aa tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] [instance: 306d093c-40fa-4863-b725-b3fc65042dce] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1035.065523] env[61962]: DEBUG oslo_concurrency.lockutils [None req-26c84f68-530e-4672-82ee-12b17ec1b4aa tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Lock "306d093c-40fa-4863-b725-b3fc65042dce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.374s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.080777] env[61962]: DEBUG nova.compute.manager [None req-36bc805c-1195-4d2a-9935-feb61c69e14d tempest-InstanceActionsTestJSON-347250734 tempest-InstanceActionsTestJSON-347250734-project-member] [instance: f53ed31d-4cca-418d-b708-4797dbc5920f] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1035.110202] env[61962]: DEBUG nova.compute.manager [None req-36bc805c-1195-4d2a-9935-feb61c69e14d tempest-InstanceActionsTestJSON-347250734 tempest-InstanceActionsTestJSON-347250734-project-member] [instance: f53ed31d-4cca-418d-b708-4797dbc5920f] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1035.135195] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36bc805c-1195-4d2a-9935-feb61c69e14d tempest-InstanceActionsTestJSON-347250734 tempest-InstanceActionsTestJSON-347250734-project-member] Lock "f53ed31d-4cca-418d-b708-4797dbc5920f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.439s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.148623] env[61962]: DEBUG nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1035.212824] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.213064] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.214767] env[61962]: INFO nova.compute.claims [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1035.584175] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92637db-6182-4e67-a147-ec422769975b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.593180] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a9fc21-edfa-44f2-b9b8-f05a2d2d377b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.627026] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdde105-6c7c-4cd3-aa3e-a7c6b943a683 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.635201] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2535649-7e55-4119-bae1-429bcfedb46d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.648611] env[61962]: DEBUG 
nova.compute.provider_tree [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.660206] env[61962]: DEBUG nova.scheduler.client.report [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1035.684992] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.472s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.685553] env[61962]: DEBUG nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1035.730026] env[61962]: DEBUG nova.compute.utils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1035.731208] env[61962]: DEBUG nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1035.731498] env[61962]: DEBUG nova.network.neutron [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1035.741486] env[61962]: DEBUG nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1035.816022] env[61962]: DEBUG nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1035.840104] env[61962]: DEBUG nova.policy [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1e3f533c97754898a3d3517cddf4fb0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e79e89d66b44fa092d586334732c0be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1035.843273] env[61962]: DEBUG nova.virt.hardware [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1035.843723] env[61962]: DEBUG nova.virt.hardware [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1035.844470] env[61962]: DEBUG nova.virt.hardware [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1035.844470] env[61962]: DEBUG nova.virt.hardware [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1035.844470] env[61962]: DEBUG nova.virt.hardware [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1035.845174] env[61962]: DEBUG nova.virt.hardware [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1035.849018] env[61962]: DEBUG nova.virt.hardware [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 
tempest-ServersTestJSON-410785708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1035.849018] env[61962]: DEBUG nova.virt.hardware [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1035.849018] env[61962]: DEBUG nova.virt.hardware [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1035.849018] env[61962]: DEBUG nova.virt.hardware [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1035.849018] env[61962]: DEBUG nova.virt.hardware [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1035.849384] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0acf178a-dcf6-4645-83bf-72a9f2ba5ce0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.856835] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72ca35e-f82b-4fa8-b310-c55e84ba4dd7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.373021] env[61962]: DEBUG nova.network.neutron [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Successfully created port: 7997f306-f8bc-4dd8-99d1-a5211d02a000 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1037.048759] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquiring lock "8ecd73a7-7f23-4994-85b9-479584831687" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.181104] env[61962]: DEBUG nova.compute.manager [req-c220d7f8-c9af-42d2-8388-cb302ee56aff req-7602a0b7-2d62-4595-8ef7-a342845e0777 service nova] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Received event network-vif-plugged-7997f306-f8bc-4dd8-99d1-a5211d02a000 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1037.181327] env[61962]: DEBUG oslo_concurrency.lockutils [req-c220d7f8-c9af-42d2-8388-cb302ee56aff req-7602a0b7-2d62-4595-8ef7-a342845e0777 service nova] Acquiring lock "8ecd73a7-7f23-4994-85b9-479584831687-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.181532] env[61962]: DEBUG oslo_concurrency.lockutils [req-c220d7f8-c9af-42d2-8388-cb302ee56aff req-7602a0b7-2d62-4595-8ef7-a342845e0777 service nova] Lock "8ecd73a7-7f23-4994-85b9-479584831687-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.183038] env[61962]: DEBUG oslo_concurrency.lockutils [req-c220d7f8-c9af-42d2-8388-cb302ee56aff req-7602a0b7-2d62-4595-8ef7-a342845e0777 service nova] Lock "8ecd73a7-7f23-4994-85b9-479584831687-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.183338] env[61962]: DEBUG nova.compute.manager [req-c220d7f8-c9af-42d2-8388-cb302ee56aff req-7602a0b7-2d62-4595-8ef7-a342845e0777 service nova] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] No waiting events found dispatching network-vif-plugged-7997f306-f8bc-4dd8-99d1-a5211d02a000 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1037.183590] env[61962]: WARNING nova.compute.manager [req-c220d7f8-c9af-42d2-8388-cb302ee56aff req-7602a0b7-2d62-4595-8ef7-a342845e0777 service nova] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Received unexpected event network-vif-plugged-7997f306-f8bc-4dd8-99d1-a5211d02a000 for instance with vm_state building and task_state deleting. [ 1037.238061] env[61962]: DEBUG nova.network.neutron [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Successfully updated port: 7997f306-f8bc-4dd8-99d1-a5211d02a000 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.274150] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquiring lock "refresh_cache-8ecd73a7-7f23-4994-85b9-479584831687" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.274296] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquired lock "refresh_cache-8ecd73a7-7f23-4994-85b9-479584831687" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.274450] env[61962]: DEBUG nova.network.neutron [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1037.343288] env[61962]: DEBUG nova.network.neutron [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1037.613271] env[61962]: DEBUG nova.network.neutron [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Updating instance_info_cache with network_info: [{"id": "7997f306-f8bc-4dd8-99d1-a5211d02a000", "address": "fa:16:3e:a8:dd:d9", "network": {"id": "8f09d3fb-202f-4bfa-987c-034a5868c257", "bridge": "br-int", "label": "tempest-ServersTestJSON-448338774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e79e89d66b44fa092d586334732c0be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20fbc86c-0935-406c-9659-3ffc33fc0d08", "external-id": "nsx-vlan-transportzone-191", "segmentation_id": 191, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7997f306-f8", "ovs_interfaceid": "7997f306-f8bc-4dd8-99d1-a5211d02a000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.628278] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Releasing lock "refresh_cache-8ecd73a7-7f23-4994-85b9-479584831687" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.628545] env[61962]: DEBUG nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Instance network_info: |[{"id": "7997f306-f8bc-4dd8-99d1-a5211d02a000", "address": "fa:16:3e:a8:dd:d9", "network": {"id": "8f09d3fb-202f-4bfa-987c-034a5868c257", "bridge": "br-int", "label": "tempest-ServersTestJSON-448338774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e79e89d66b44fa092d586334732c0be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20fbc86c-0935-406c-9659-3ffc33fc0d08", "external-id": "nsx-vlan-transportzone-191", "segmentation_id": 191, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7997f306-f8", "ovs_interfaceid": "7997f306-f8bc-4dd8-99d1-a5211d02a000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1037.629017] env[61962]: DEBUG 
nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:dd:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20fbc86c-0935-406c-9659-3ffc33fc0d08', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7997f306-f8bc-4dd8-99d1-a5211d02a000', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1037.636489] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Creating folder: Project (8e79e89d66b44fa092d586334732c0be). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1037.637155] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6070579d-263e-4531-98e0-e81235aec273 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.649222] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Created folder: Project (8e79e89d66b44fa092d586334732c0be) in parent group-v953327. [ 1037.649425] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Creating folder: Instances. Parent ref: group-v953387. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1037.649673] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3052e471-2a66-4a9f-bf40-f1373704a96a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.661703] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Created folder: Instances in parent group-v953387. [ 1037.661962] env[61962]: DEBUG oslo.service.loopingcall [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1037.662254] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1037.662412] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59fe9240-b3e4-4182-b7ef-efdf1f44b7e3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.683667] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1037.683667] env[61962]: value = "task-4891997" [ 1037.683667] env[61962]: _type = "Task" [ 1037.683667] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.691725] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891997, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.200652] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891997, 'name': CreateVM_Task, 'duration_secs': 0.317503} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.200652] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1038.201000] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.201259] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.201637] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1038.201756] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3abfd191-3bd6-403a-9968-ea7f25340851 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.207300] env[61962]: DEBUG oslo_vmware.api [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Waiting for the task: (returnval){ [ 1038.207300] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]525a95ab-54b4-b38e-0bf0-ab17336388a7" [ 1038.207300] env[61962]: _type = "Task" [ 1038.207300] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.223058] env[61962]: DEBUG oslo_vmware.api [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]525a95ab-54b4-b38e-0bf0-ab17336388a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.718511] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.718825] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1038.718825] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.233934] env[61962]: DEBUG nova.compute.manager [req-c78bb16f-0f3e-4efc-94bd-b0dcd0c40eea req-af235cba-e9cb-4a6b-8766-95c6dbac631e service nova] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Received event network-changed-7997f306-f8bc-4dd8-99d1-a5211d02a000 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1039.234361] env[61962]: DEBUG nova.compute.manager [req-c78bb16f-0f3e-4efc-94bd-b0dcd0c40eea req-af235cba-e9cb-4a6b-8766-95c6dbac631e service nova] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Refreshing instance network info cache due to event network-changed-7997f306-f8bc-4dd8-99d1-a5211d02a000. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1039.234361] env[61962]: DEBUG oslo_concurrency.lockutils [req-c78bb16f-0f3e-4efc-94bd-b0dcd0c40eea req-af235cba-e9cb-4a6b-8766-95c6dbac631e service nova] Acquiring lock "refresh_cache-8ecd73a7-7f23-4994-85b9-479584831687" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.235080] env[61962]: DEBUG oslo_concurrency.lockutils [req-c78bb16f-0f3e-4efc-94bd-b0dcd0c40eea req-af235cba-e9cb-4a6b-8766-95c6dbac631e service nova] Acquired lock "refresh_cache-8ecd73a7-7f23-4994-85b9-479584831687" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.235080] env[61962]: DEBUG nova.network.neutron [req-c78bb16f-0f3e-4efc-94bd-b0dcd0c40eea req-af235cba-e9cb-4a6b-8766-95c6dbac631e service nova] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Refreshing network info cache for port 7997f306-f8bc-4dd8-99d1-a5211d02a000 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1039.577227] env[61962]: DEBUG nova.network.neutron [req-c78bb16f-0f3e-4efc-94bd-b0dcd0c40eea req-af235cba-e9cb-4a6b-8766-95c6dbac631e service nova] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Updated VIF entry in instance network info cache for port 7997f306-f8bc-4dd8-99d1-a5211d02a000. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1039.577595] env[61962]: DEBUG nova.network.neutron [req-c78bb16f-0f3e-4efc-94bd-b0dcd0c40eea req-af235cba-e9cb-4a6b-8766-95c6dbac631e service nova] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Updating instance_info_cache with network_info: [{"id": "7997f306-f8bc-4dd8-99d1-a5211d02a000", "address": "fa:16:3e:a8:dd:d9", "network": {"id": "8f09d3fb-202f-4bfa-987c-034a5868c257", "bridge": "br-int", "label": "tempest-ServersTestJSON-448338774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e79e89d66b44fa092d586334732c0be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20fbc86c-0935-406c-9659-3ffc33fc0d08", "external-id": "nsx-vlan-transportzone-191", "segmentation_id": 191, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7997f306-f8", "ovs_interfaceid": "7997f306-f8bc-4dd8-99d1-a5211d02a000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.587741] env[61962]: DEBUG oslo_concurrency.lockutils [req-c78bb16f-0f3e-4efc-94bd-b0dcd0c40eea req-af235cba-e9cb-4a6b-8766-95c6dbac631e service nova] Releasing lock "refresh_cache-8ecd73a7-7f23-4994-85b9-479584831687" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.158654] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.170452] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.170670] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.170875] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.170996] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1041.173359] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4455c14-b0a2-4b4e-b726-481644e80a03 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.181638] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b0e924-a111-41b1-8d3c-38c807cb360d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.197860] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d61ac41-bd6b-4c56-aa90-b8907f4e3b32 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.205136] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a1b5f4-7e4e-46a9-97ca-a9d4f88005b1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.234265] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180573MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1041.234414] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.234605] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.331974] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 295064f0-9677-4287-877a-ef2a33a87fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1041.332170] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6996289c-f881-4733-8943-98e3633e43ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1041.332299] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1041.332418] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1041.332533] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1041.332648] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1041.332762] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1dcafe3b-989b-4290-be54-ceccc62a323f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1041.332876] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1041.332986] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1041.333112] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8ecd73a7-7f23-4994-85b9-479584831687 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1041.347670] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.358561] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.370389] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 98c88a0d-07d4-4bec-8722-0afe732886ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.383772] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b42bffa2-bf04-4287-9ae8-8cbbe107893a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.395191] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a47d03b4-71aa-43e9-bfa4-d2a07ea89fb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.405812] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2a085633-9439-464b-b0c7-00e68ed46fb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.416934] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d970d822-12ab-43cc-8d34-abc6eff8a68d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.429058] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 233cd3d2-df58-4826-9bb1-a93167d4cfa4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.440874] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 56d0be20-ee15-4c11-afa6-1630ffed7bc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.452663] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 7dc97bc2-eaf6-4df0-bd77-d4ee263f3a48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.463416] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b3143d91-7fe7-45cf-9b90-c84d989394a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.463682] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1041.463901] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '40', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_78e44aa9cd5642928627210447b3e177': '1', 'io_workload': '10', 'num_proj_b4cf0a2155644882b4bf42e69e210138': '1', 'num_proj_ed943c69c73d48e2b1b2bab311295d84': '2', 'num_proj_5481169540de4205a1ff7731132eb3a7': '1', 'num_proj_389fb8d4c2ce47829fe23a6ca60e9bdd': '1', 'num_proj_7738f0dd32484bf9978c5d006d088c3b': '1', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_3285f5bbd10d495e852d0ef0c0047847': '1', 'num_proj_8e79e89d66b44fa092d586334732c0be': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1041.744661] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a068ed-b6e5-45be-ba1f-0da43842391e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.753025] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190a3f94-6431-4f09-9785-6e28951bb6fd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.785283] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-716d1ce3-4c93-447a-b537-7fb7f710e1a1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.793633] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c776bb1-453e-4cd1-b3c8-9cef31aa8b89 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.807796] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.817783] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.832699] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1041.832905] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.598s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.832899] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.833170] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1044.153736] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.158617] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.158987] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.158987] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1047.158987] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1047.179426] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1047.179570] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1047.179722] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1047.179827] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1047.179950] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1047.180090] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1047.180253] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1047.180365] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1047.180484] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1047.180599] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1047.180716] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1048.158061] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.158061] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1049.158562] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1049.158868] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.081980] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquiring lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.082417] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c" acquired by
"nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.542213] env[61962]: WARNING oslo_vmware.rw_handles [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1081.542213] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1081.542213] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1081.542213] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1081.542213] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1081.542213] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1081.542213] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1081.542213] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1081.542213] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1081.542213] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1081.542213] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1081.542213] env[61962]: ERROR oslo_vmware.rw_handles [ 1081.543050] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/5fe03819-a358-4795-888b-e42be739abf5/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1081.545100] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1081.545371] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Copying Virtual Disk [datastore2] vmware_temp/5fe03819-a358-4795-888b-e42be739abf5/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/5fe03819-a358-4795-888b-e42be739abf5/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1081.545714] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2044b91-ca99-4592-a3ec-f472ac61406b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.557390] env[61962]: DEBUG
oslo_vmware.api [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Waiting for the task: (returnval){ [ 1081.557390] env[61962]: value = "task-4891998" [ 1081.557390] env[61962]: _type = "Task" [ 1081.557390] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.569642] env[61962]: DEBUG oslo_vmware.api [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Task: {'id': task-4891998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.067865] env[61962]: DEBUG oslo_vmware.exceptions [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1082.069340] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.069340] env[61962]: ERROR nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1082.069340] env[61962]: Faults: ['InvalidArgument'] [ 1082.069340] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Traceback (most recent call last): [ 1082.069340] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1082.069340] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] yield resources [ 1082.069340] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1082.069340] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] self.driver.spawn(context, instance, image_meta, [ 1082.069340] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1082.069340] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] self._fetch_image_if_missing(context, vi) [ 1082.069814] env[61962]: 
ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] image_cache(vi, tmp_image_ds_loc) [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] vm_util.copy_virtual_disk( [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] session._wait_for_task(vmdk_copy_task) [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] return self.wait_for_task(task_ref) [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] return evt.wait() [ 1082.069814] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1082.070223] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] result = hub.switch() [ 1082.070223] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1082.070223] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] return self.greenlet.switch() [ 1082.070223] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1082.070223] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] self.f(*self.args, **self.kw) [ 1082.070223] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1082.070223] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] raise exceptions.translate_fault(task_info.error) [ 1082.070223] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1082.070223] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Faults: ['InvalidArgument'] [ 1082.070223] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] [ 1082.070223] env[61962]: INFO nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 
tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Terminating instance [ 1082.071133] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.071339] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1082.071600] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0e16cda-c550-4862-a597-fa0e87860564 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.074219] env[61962]: DEBUG nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1082.074466] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1082.075166] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafd8f13-eeb0-47c6-ae20-bd4c5737f93c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.082654] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1082.082910] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48f9260f-174e-482a-b709-10c0d0e15e6e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.085254] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1082.085434] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1082.086498] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d715458e-ace7-43ec-9d64-48d2b4d19292 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.092076] env[61962]: DEBUG oslo_vmware.api [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Waiting for the task: (returnval){ [ 1082.092076] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5265bc3d-9bee-5ea8-e69e-bfbae9dcfee1" [ 1082.092076] env[61962]: _type = "Task" [ 1082.092076] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.105096] env[61962]: DEBUG oslo_vmware.api [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5265bc3d-9bee-5ea8-e69e-bfbae9dcfee1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.163511] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1082.163741] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1082.163920] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Deleting the datastore file [datastore2] 295064f0-9677-4287-877a-ef2a33a87fef {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.164303] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d10339ec-de7f-4e98-a338-d9c260567a1f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.172942] env[61962]: DEBUG oslo_vmware.api [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Waiting for the task: (returnval){ [ 1082.172942] env[61962]: value = "task-4892000" [ 1082.172942] env[61962]: _type = "Task" [ 1082.172942] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.186676] env[61962]: DEBUG oslo_vmware.api [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Task: {'id': task-4892000, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.604140] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1082.604552] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Creating directory with path [datastore2] vmware_temp/8b899285-0b14-437c-b445-5034746ae90e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1082.604688] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ed2c204-3646-4a25-b424-088cc46b4acd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.619068] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Created directory with path [datastore2] vmware_temp/8b899285-0b14-437c-b445-5034746ae90e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1082.619243] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Fetch image to [datastore2] vmware_temp/8b899285-0b14-437c-b445-5034746ae90e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1082.619313] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/8b899285-0b14-437c-b445-5034746ae90e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1082.620187] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c849dfcd-9e84-408d-b7f6-b3d26a8e1817 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.628753] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c0b0d7-c50f-4460-8bd6-8d36511b5ce3 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.640540] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec20e036-d795-4c05-be5e-e18161ed0a48 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.692235] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550cd7f5-634d-4b64-abc9-65104100258d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.700340] env[61962]: DEBUG oslo_vmware.api [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Task: {'id': task-4892000, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082922} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.702062] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1082.702284] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1082.702441] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1082.702639] env[61962]: INFO nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 1082.704565] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8bdb472e-c484-4d12-9643-ab7f05b278bc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.707539] env[61962]: DEBUG nova.compute.claims [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1082.707718] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.707930] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.729407] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1082.796316] env[61962]: DEBUG oslo_vmware.rw_handles [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8b899285-0b14-437c-b445-5034746ae90e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1082.859068] env[61962]: DEBUG oslo_vmware.rw_handles [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1082.859068] env[61962]: DEBUG oslo_vmware.rw_handles [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8b899285-0b14-437c-b445-5034746ae90e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1083.112278] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c1537c-2e89-4291-a463-d7da245fe0a1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.120633] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acbca181-532f-4845-8c07-fe0527819e1a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.151123] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4b7222-884c-4b3e-981b-3ec3c89501bc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.159304] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989d8cfc-6e5d-4ca8-80f1-da713607d9b2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.173275] env[61962]: DEBUG nova.compute.provider_tree [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.182251] env[61962]: DEBUG nova.scheduler.client.report [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1083.200873] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.493s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.201816] env[61962]: ERROR nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1083.201816] env[61962]: Faults: ['InvalidArgument'] [ 1083.201816] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Traceback (most recent call last): [ 1083.201816] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1083.201816] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] self.driver.spawn(context, instance, image_meta, [ 1083.201816] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1083.201816] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1083.201816] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1083.201816] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] self._fetch_image_if_missing(context, vi) [ 1083.201816] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1083.201816] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] image_cache(vi, tmp_image_ds_loc) [ 1083.201816] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] vm_util.copy_virtual_disk( [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] session._wait_for_task(vmdk_copy_task) [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] return self.wait_for_task(task_ref) [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] return evt.wait() [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] result = hub.switch() [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] return self.greenlet.switch() [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1083.202385] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] self.f(*self.args, **self.kw) [ 1083.202792] env[61962]: ERROR nova.compute.manager [instance: 
295064f0-9677-4287-877a-ef2a33a87fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1083.202792] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] raise exceptions.translate_fault(task_info.error) [ 1083.202792] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1083.202792] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Faults: ['InvalidArgument'] [ 1083.202792] env[61962]: ERROR nova.compute.manager [instance: 295064f0-9677-4287-877a-ef2a33a87fef] [ 1083.202792] env[61962]: DEBUG nova.compute.utils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1083.203746] env[61962]: DEBUG nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Build of instance 295064f0-9677-4287-877a-ef2a33a87fef was re-scheduled: A specified parameter was not correct: fileType [ 1083.203746] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1083.204133] env[61962]: DEBUG nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1083.204311] env[61962]: DEBUG nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1083.204490] env[61962]: DEBUG nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1083.204659] env[61962]: DEBUG nova.network.neutron [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1083.615065] env[61962]: DEBUG nova.network.neutron [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.632751] env[61962]: INFO nova.compute.manager [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Took 0.43 seconds to deallocate network for instance. [ 1083.764495] env[61962]: INFO nova.scheduler.client.report [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Deleted allocations for instance 295064f0-9677-4287-877a-ef2a33a87fef [ 1083.792144] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4f0380d0-0d3b-4e8d-a9a6-09d47c765b7b tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Lock "295064f0-9677-4287-877a-ef2a33a87fef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 505.085s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.793490] env[61962]: DEBUG oslo_concurrency.lockutils [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Lock "295064f0-9677-4287-877a-ef2a33a87fef" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 306.649s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.793770] env[61962]: DEBUG oslo_concurrency.lockutils [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Acquiring lock "295064f0-9677-4287-877a-ef2a33a87fef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.793995] env[61962]: DEBUG oslo_concurrency.lockutils [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Lock
"295064f0-9677-4287-877a-ef2a33a87fef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.794607] env[61962]: DEBUG oslo_concurrency.lockutils [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Lock "295064f0-9677-4287-877a-ef2a33a87fef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.796638] env[61962]: INFO nova.compute.manager [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Terminating instance [ 1083.799473] env[61962]: DEBUG nova.compute.manager [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1083.799563] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1083.800111] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64a292fb-c70c-41c5-9ee3-b85140f57cf2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.810161] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5330e439-164f-4fa9-a4e4-7e79647d5b16 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.821586] env[61962]: DEBUG nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1083.848828] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 295064f0-9677-4287-877a-ef2a33a87fef could not be found. 
[ 1083.849117] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1083.849334] env[61962]: INFO nova.compute.manager [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1083.849662] env[61962]: DEBUG oslo.service.loopingcall [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.849953] env[61962]: DEBUG nova.compute.manager [-] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1083.850069] env[61962]: DEBUG nova.network.neutron [-] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1083.882374] env[61962]: DEBUG nova.network.neutron [-] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.885165] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.885400] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.887223] env[61962]: INFO nova.compute.claims [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1083.891783] env[61962]: INFO nova.compute.manager [-] [instance: 295064f0-9677-4287-877a-ef2a33a87fef] Took 0.04 seconds to deallocate network for instance. 
[ 1084.001865] env[61962]: DEBUG oslo_concurrency.lockutils [None req-86c2f272-a96f-407d-8b47-f5912c57652f tempest-VolumesAssistedSnapshotsTest-1134839840 tempest-VolumesAssistedSnapshotsTest-1134839840-project-member] Lock "295064f0-9677-4287-877a-ef2a33a87fef" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.208s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.238681] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffe6f3a-2208-4bec-a56a-a5e22baac195 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.246959] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63ac86b-3aee-4d70-b320-891a9ddd68c4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.279222] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ce88c7-4529-4a95-b9bd-3d364919840f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.289273] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bde274-9936-48c6-a810-0e1ce1ac1c5b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.303700] env[61962]: DEBUG nova.compute.provider_tree [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.313551] env[61962]: DEBUG nova.scheduler.client.report [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1084.332588] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.447s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.333117] env[61962]: DEBUG nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Start building networks asynchronously for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1084.368738] env[61962]: DEBUG nova.compute.utils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1084.370345] env[61962]: DEBUG nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1084.370517] env[61962]: DEBUG nova.network.neutron [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1084.379751] env[61962]: DEBUG nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1084.430300] env[61962]: DEBUG nova.policy [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84e0d67cbeef490f920225baaa7ef0bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e58b68272c404983993a09ad12e8c03b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1084.448495] env[61962]: DEBUG nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1084.475937] env[61962]: DEBUG nova.virt.hardware [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1084.476254] env[61962]: DEBUG nova.virt.hardware [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1084.476349] env[61962]: DEBUG nova.virt.hardware [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1084.476611] env[61962]: DEBUG nova.virt.hardware [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1084.476800] env[61962]: DEBUG nova.virt.hardware [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1084.476892] env[61962]: DEBUG nova.virt.hardware [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1084.477087] env[61962]: DEBUG nova.virt.hardware [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1084.477259] env[61962]: DEBUG nova.virt.hardware [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1084.477435] env[61962]: DEBUG nova.virt.hardware [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Got 1 
possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1084.477605] env[61962]: DEBUG nova.virt.hardware [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1084.477867] env[61962]: DEBUG nova.virt.hardware [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1084.478819] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69224572-b66f-4518-9a25-7623e4b832ca {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.487589] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f2fafb-f346-441a-9c5e-3d385ec9346f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.928040] env[61962]: DEBUG nova.network.neutron [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Successfully created port: 413a915c-af1d-408e-9407-93d615a7e5f4 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1085.792809] env[61962]: DEBUG nova.compute.manager [req-c84970be-33fb-4aa3-be06-ece5e0d6e1e4 req-d7402e87-2af3-48b7-81db-836a489be470 service nova] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Received event network-vif-plugged-413a915c-af1d-408e-9407-93d615a7e5f4 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1085.793089] env[61962]: DEBUG oslo_concurrency.lockutils [req-c84970be-33fb-4aa3-be06-ece5e0d6e1e4 req-d7402e87-2af3-48b7-81db-836a489be470 service nova] Acquiring lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.793318] env[61962]: DEBUG oslo_concurrency.lockutils [req-c84970be-33fb-4aa3-be06-ece5e0d6e1e4 req-d7402e87-2af3-48b7-81db-836a489be470 service nova] Lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.793532] env[61962]: DEBUG oslo_concurrency.lockutils [req-c84970be-33fb-4aa3-be06-ece5e0d6e1e4 req-d7402e87-2af3-48b7-81db-836a489be470 service nova] Lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.793736] env[61962]: DEBUG nova.compute.manager [req-c84970be-33fb-4aa3-be06-ece5e0d6e1e4 req-d7402e87-2af3-48b7-81db-836a489be470 service nova] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] No waiting events found dispatching 
network-vif-plugged-413a915c-af1d-408e-9407-93d615a7e5f4 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1085.793864] env[61962]: WARNING nova.compute.manager [req-c84970be-33fb-4aa3-be06-ece5e0d6e1e4 req-d7402e87-2af3-48b7-81db-836a489be470 service nova] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Received unexpected event network-vif-plugged-413a915c-af1d-408e-9407-93d615a7e5f4 for instance with vm_state building and task_state spawning. [ 1085.935778] env[61962]: DEBUG nova.network.neutron [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Successfully updated port: 413a915c-af1d-408e-9407-93d615a7e5f4 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1085.948414] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "refresh_cache-d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.948805] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "refresh_cache-d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.948805] env[61962]: DEBUG nova.network.neutron [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1086.007221] env[61962]: DEBUG nova.network.neutron [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Instance cache missing network info. 
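The CPU-topology negotiation logged a few entries back reduces to a small search: flavor and image each contribute limit and preference triples (sockets:cores:threads, with 0 meaning unconstrained), and Nova enumerates every factorization of the vCPU count that fits the limits. A self-contained re-derivation of that search under the values from the log (illustrative only; the real code is in nova/virt/hardware.py):

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # Each factor of a valid layout is bounded by vcpus itself, so
        # the 65536 defaults never actually matter for small flavors.
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found

    # m1.nano: 1 vCPU, no flavor/image limits -> exactly one candidate.
    print(possible_topologies(1, 65536, 65536, 65536))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]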
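The "No waiting events found" / "Received unexpected event" pair above is the signature of Neutron's network-vif-plugged callback arriving before the spawn path registered a waiter for it. The registry behind pop_instance_event can be sketched as a lock-guarded map of waiters keyed per instance and event (a toy version; Nova's real one also serializes on the "<uuid>-events" lock seen above):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_key) -> Event

        def prepare(self, uuid, event_key):
            ev = threading.Event()
            with self._lock:
                self._waiters[(uuid, event_key)] = ev
            return ev

        def pop(self, uuid, event_key):
            with self._lock:
                return self._waiters.pop((uuid, event_key), None)

    events = InstanceEvents()
    ev = events.pop('d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9',
                    'network-vif-plugged-413a915c-af1d-408e-9407-93d615a7e5f4')
    if ev is None:
        print('unexpected event')  # the WARNING branch in the log
    else:
        ev.set()                   # wake the thread blocked in spawn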
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1086.292336] env[61962]: DEBUG nova.network.neutron [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Updating instance_info_cache with network_info: [{"id": "413a915c-af1d-408e-9407-93d615a7e5f4", "address": "fa:16:3e:ba:3e:8e", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap413a915c-af", "ovs_interfaceid": "413a915c-af1d-408e-9407-93d615a7e5f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.311678] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "refresh_cache-d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.312189] env[61962]: DEBUG nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Instance network_info: |[{"id": "413a915c-af1d-408e-9407-93d615a7e5f4", "address": "fa:16:3e:ba:3e:8e", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap413a915c-af", "ovs_interfaceid": "413a915c-af1d-408e-9407-93d615a7e5f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1086.313228] env[61962]: DEBUG 
nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:3e:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be8bd197-4b2b-46e7-88ea-2554b0438584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '413a915c-af1d-408e-9407-93d615a7e5f4', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1086.325377] env[61962]: DEBUG oslo.service.loopingcall [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1086.326653] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1086.326653] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9bd48ccf-0e3b-4f8c-b13f-791740e65e6c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.355877] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1086.355877] env[61962]: value = "task-4892001" [ 1086.355877] env[61962]: _type = "Task" [ 1086.355877] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.365815] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892001, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.866276] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892001, 'name': CreateVM_Task, 'duration_secs': 0.306353} completed successfully. 
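Before Folder.CreateVM_Task is invoked, vmops flattens the cached Neutron network_info into the driver-level "Instance VIF info" shown above; for NSX-backed ports the network reference is an OpaqueNetwork carrying the logical-switch id from the port's binding details. A sketch of that mapping over the dict shapes in this log (not Nova's exact code):

    def vif_info_from_network_info(nw_info):
        out = []
        for vif in nw_info:
            details = vif.get('details', {})
            out.append({
                'network_name': vif['network']['bridge'],     # 'br-int'
                'mac_address': vif['address'],
                'network_ref': {
                    'type': 'OpaqueNetwork',
                    'network-id': details.get('nsx-logical-switch-id'),
                    'network-type': 'nsx.LogicalSwitch',
                    'use-external-id': True,
                },
                'iface_id': vif['id'],
                'vif_model': 'vmxnet3',  # matches the image name in the log
            })
        return out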
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.866539] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1086.867280] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.867443] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.867773] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1086.868043] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25abce02-56d1-4171-bbfc-19afe2eb6578 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.873339] env[61962]: DEBUG oslo_vmware.api [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 1086.873339] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52e99afb-cc52-2278-ed81-31cb5ad867ce" [ 1086.873339] env[61962]: _type = "Task" [ 1086.873339] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.881707] env[61962]: DEBUG oslo_vmware.api [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52e99afb-cc52-2278-ed81-31cb5ad867ce, 'name': SearchDatastore_Task} progress is 0%. 
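CreateVM_Task above completed in about 0.31 s and SearchDatastore_Task is now being polled the same way: invoke the method, then poll the returned task until it reaches a terminal state. A generic version of that loop, assuming a hypothetical poll() callable that returns (state, payload); oslo.vmware's api.py drives the equivalent loop from a looping call:

    import time

    def wait_for_task(poll, interval=0.5, timeout=300.0):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, payload = poll()
            if state == 'success':
                return payload               # e.g. the created VM moref
            if state == 'error':
                raise RuntimeError(payload)  # surfaces VIM faults
            time.sleep(interval)             # "progress is 0%." -> retry
        raise TimeoutError('task did not complete within %.0fs' % timeout)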
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.384575] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.384965] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1087.385195] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.819476] env[61962]: DEBUG nova.compute.manager [req-8a56dbad-d55c-47d0-89f1-53b1adc13663 req-78c8fc9a-f1ff-4d47-8504-2d9e57485beb service nova] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Received event network-changed-413a915c-af1d-408e-9407-93d615a7e5f4 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1087.819686] env[61962]: DEBUG nova.compute.manager [req-8a56dbad-d55c-47d0-89f1-53b1adc13663 req-78c8fc9a-f1ff-4d47-8504-2d9e57485beb service nova] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Refreshing instance network info cache due to event network-changed-413a915c-af1d-408e-9407-93d615a7e5f4. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1087.819922] env[61962]: DEBUG oslo_concurrency.lockutils [req-8a56dbad-d55c-47d0-89f1-53b1adc13663 req-78c8fc9a-f1ff-4d47-8504-2d9e57485beb service nova] Acquiring lock "refresh_cache-d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.820049] env[61962]: DEBUG oslo_concurrency.lockutils [req-8a56dbad-d55c-47d0-89f1-53b1adc13663 req-78c8fc9a-f1ff-4d47-8504-2d9e57485beb service nova] Acquired lock "refresh_cache-d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.820209] env[61962]: DEBUG nova.network.neutron [req-8a56dbad-d55c-47d0-89f1-53b1adc13663 req-78c8fc9a-f1ff-4d47-8504-2d9e57485beb service nova] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Refreshing network info cache for port 413a915c-af1d-408e-9407-93d615a7e5f4 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1088.139694] env[61962]: DEBUG nova.network.neutron [req-8a56dbad-d55c-47d0-89f1-53b1adc13663 req-78c8fc9a-f1ff-4d47-8504-2d9e57485beb service nova] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Updated VIF entry in instance network info cache for port 413a915c-af1d-408e-9407-93d615a7e5f4. 
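The "[datastore2] devstack-image-cache_base/<image>" lock acquired and released around the datastore search serializes image-cache fills, so concurrent spawns from the same Glance image do not download the VMDK twice. A minimal sketch with oslo.concurrency, reusing the lock-name scheme from the log (cache_has and cache_fill are hypothetical stand-ins for the datastore search and download):

    from oslo_concurrency import lockutils

    def fetch_image_if_missing(image_id, cache_has, cache_fill):
        name = '[datastore2] devstack-image-cache_base/%s' % image_id
        with lockutils.lock(name):       # one filler per image at a time
            if not cache_has(image_id):
                cache_fill(image_id)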
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1088.140127] env[61962]: DEBUG nova.network.neutron [req-8a56dbad-d55c-47d0-89f1-53b1adc13663 req-78c8fc9a-f1ff-4d47-8504-2d9e57485beb service nova] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Updating instance_info_cache with network_info: [{"id": "413a915c-af1d-408e-9407-93d615a7e5f4", "address": "fa:16:3e:ba:3e:8e", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap413a915c-af", "ovs_interfaceid": "413a915c-af1d-408e-9407-93d615a7e5f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.153151] env[61962]: DEBUG oslo_concurrency.lockutils [req-8a56dbad-d55c-47d0-89f1-53b1adc13663 req-78c8fc9a-f1ff-4d47-8504-2d9e57485beb service nova] Releasing lock "refresh_cache-d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1088.335632] env[61962]: DEBUG oslo_concurrency.lockutils [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.159830] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.173177] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.173177] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.173177] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.173177] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1101.174048] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6d940b-144c-4b32-88fd-0835366fae42 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.183366] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0519556c-b6a2-4df5-9d2e-de0ce4402045 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.199638] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457635b7-024c-444d-8890-542d6db7790c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.206713] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b7d62b-5134-4a14-b0a0-05e95d314a0c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.236862] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180575MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1101.237050] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.237263] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.324280] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6996289c-f881-4733-8943-98e3633e43ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.324438] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.324565] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.324689] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.324812] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.325011] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1dcafe3b-989b-4290-be54-ceccc62a323f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.325180] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.325340] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.325419] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8ecd73a7-7f23-4994-85b9-479584831687 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.325533] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.338814] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.353806] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 98c88a0d-07d4-4bec-8722-0afe732886ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.365772] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b42bffa2-bf04-4287-9ae8-8cbbe107893a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.378763] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a47d03b4-71aa-43e9-bfa4-d2a07ea89fb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.390774] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2a085633-9439-464b-b0c7-00e68ed46fb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.402057] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d970d822-12ab-43cc-8d34-abc6eff8a68d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.413743] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 233cd3d2-df58-4826-9bb1-a93167d4cfa4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.427305] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 56d0be20-ee15-4c11-afa6-1630ffed7bc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.441026] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 7dc97bc2-eaf6-4df0-bd77-d4ee263f3a48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.452400] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b3143d91-7fe7-45cf-9b90-c84d989394a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.464368] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
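The audit sorts each allocation against this node into a small decision table: instances actively managed here keep their allocations, instances the scheduler has placed but that have not started are skipped, and deleted instances would have their allocations reclaimed. A rough sketch of that triage, inferred from the two message types above (the authoritative logic is _remove_deleted_instances_allocations in resource_tracker.py):

    def triage_allocation(instance, this_host):
        if instance.deleted:
            return 'remove'   # reclaim the stale placement allocation
        if instance.host == this_host:
            return 'keep'     # "actively managed on this compute host"
        if instance.host is None:
            return 'skip'     # scheduled here, "yet to start"
        return 'leave'        # owned by another host; not ours to touch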
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.464658] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1101.464860] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '41', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_b4cf0a2155644882b4bf42e69e210138': '1', 'io_workload': '10', 'num_proj_ed943c69c73d48e2b1b2bab311295d84': '2', 'num_proj_5481169540de4205a1ff7731132eb3a7': '1', 'num_proj_389fb8d4c2ce47829fe23a6ca60e9bdd': '1', 'num_proj_7738f0dd32484bf9978c5d006d088c3b': '1', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_3285f5bbd10d495e852d0ef0c0047847': '1', 'num_proj_8e79e89d66b44fa092d586334732c0be': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1101.775031] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9b0c68-b6d4-49d9-954e-306fdd8f8c8d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.783119] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01713b4b-740a-4be1-82b8-9e6253e2a28a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.816977] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcd04ef-a650-475d-a372-c19440cdf849 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.825645] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fd1bcc-ae13-4ded-871e-d68ae993afba {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.839485] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.850884] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1101.866845] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1101.866845] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.629s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.861300] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.888396] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.888546] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1105.181263] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.159911] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.163024] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.163024] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1108.163024] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1108.189586] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1108.190034] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Skipping network cache update for instance because it is Building. 
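The final resource view two entries up is plain arithmetic over the ten m1.nano instances plus the reserved memory from the inventory line: 512 MB reserved + 10 x 128 MB = 1792 MB used RAM, 10 x 1 GB = 10 GB used disk, and 10 of 48 vCPUs used (stretched by the 4.0 VCPU allocation ratio). A quick check:

    reserved_mb, n = 512, 10
    print(reserved_mb + n * 128)  # 1792 MB used_ram, as logged
    print(n * 1)                  # 10 GB used_disk
    print(n * 1)                  # 10 used_vcpus of 48 (ratio 4.0)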
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1108.190034] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1108.190167] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1108.192709] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1108.192709] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1108.192709] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1108.192709] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1108.192709] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1108.193014] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1108.193014] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
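_heal_instance_info_cache is one of the oslo.service periodic tasks being dispatched throughout this stretch of the log, and it deliberately skips instances still in the Building state because the spawn path is writing those caches itself. A compact sketch combining both behaviors (the spacing value and the helper methods are assumptions for illustration):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class ComputeManagerSketch(periodic_task.PeriodicTasks):
        def __init__(self, conf=cfg.CONF):
            super().__init__(conf)

        @periodic_task.periodic_task(spacing=60)   # illustrative interval
        def _heal_instance_info_cache(self, context):
            for inst in self._instances():
                if inst.vm_state == 'building':
                    continue  # "Skipping network cache update ... Building."
                self._refresh_network_cache(context, inst)

        # Hypothetical stand-ins for the DB and Neutron round-trips.
        def _instances(self):
            return []

        def _refresh_network_cache(self, context, inst):
            pass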
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1109.158722] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.158998] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.159186] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 1109.171761] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] There are 0 instances to clean {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1109.403614] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquiring lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.403883] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.158513] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.158888] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.159162] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.167894] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.168276] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61962) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.168276] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances with incomplete migration {{(pid=61962) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1130.468084] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.493071] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Getting list of instances from cluster (obj){ [ 1130.493071] env[61962]: value = "domain-c8" [ 1130.493071] env[61962]: _type = "ClusterComputeResource" [ 1130.493071] env[61962]: } {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1130.493968] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e324406-5547-41ae-ad8c-1f394fa861ee {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.514082] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Got total of 10 instances {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1130.514271] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 6996289c-f881-4733-8943-98e3633e43ca {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1130.514470] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 85c6b6af-f1b5-47ca-8e02-61c3669ec02b {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1130.514633] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 0aa340f7-a219-47f5-9d7d-dd4062643cf7 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1130.514791] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid ff7d9da5-bc9a-41f2-a2e4-521d87b22117 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1130.514947] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid bf82c44c-95ce-4e4c-a32f-444bba1ef5cd {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1130.515118] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 1dcafe3b-989b-4290-be54-ceccc62a323f {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1130.515278] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 454eebcc-ea18-4046-ba2e-01de1c8a954a {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1130.515428] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid e76b5908-3bad-4c19-90e7-b15ca253ad80 {{(pid=61962) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10417}} [ 1130.515580] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 8ecd73a7-7f23-4994-85b9-479584831687 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1130.515730] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1130.516093] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "6996289c-f881-4733-8943-98e3633e43ca" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.516346] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.516554] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.516751] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.516961] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.517231] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "1dcafe3b-989b-4290-be54-ceccc62a323f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.517472] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "454eebcc-ea18-4046-ba2e-01de1c8a954a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.517574] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "e76b5908-3bad-4c19-90e7-b15ca253ad80" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.517760] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "8ecd73a7-7f23-4994-85b9-479584831687" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.517953] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.561308] env[61962]: WARNING oslo_vmware.rw_handles [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1131.561308] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1131.561308] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1131.561308] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1131.561308] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1131.561308] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1131.561308] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1131.561308] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1131.561308] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1131.561308] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1131.561308] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1131.561308] env[61962]: ERROR oslo_vmware.rw_handles [ 1131.562113] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/8b899285-0b14-437c-b445-5034746ae90e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1131.563864] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1131.564162] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/8b899285-0b14-437c-b445-5034746ae90e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/8b899285-0b14-437c-b445-5034746ae90e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1131.564506] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c454b8b1-e2e3-4db8-aee6-af77301700a4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.573473] env[61962]: DEBUG oslo_vmware.api [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Waiting for the task: (returnval){ [ 1131.573473] env[61962]: value = "task-4892002" [ 1131.573473] env[61962]: _type = "Task" [ 1131.573473] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.582526] env[61962]: DEBUG oslo_vmware.api [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Task: {'id': task-4892002, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.083768] env[61962]: DEBUG oslo_vmware.exceptions [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1132.084094] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.084652] env[61962]: ERROR nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1132.084652] env[61962]: Faults: ['InvalidArgument'] [ 1132.084652] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] Traceback (most recent call last): [ 1132.084652] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1132.084652] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] yield resources [ 1132.084652] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1132.084652] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] self.driver.spawn(context, instance, image_meta, [ 1132.084652] env[61962]: ERROR nova.compute.manager 
[instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1132.084652] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1132.084652] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1132.084652] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] self._fetch_image_if_missing(context, vi) [ 1132.084652] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] image_cache(vi, tmp_image_ds_loc) [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] vm_util.copy_virtual_disk( [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] session._wait_for_task(vmdk_copy_task) [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] return self.wait_for_task(task_ref) [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] return evt.wait() [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] result = hub.switch() [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1132.085045] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] return self.greenlet.switch() [ 1132.085414] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1132.085414] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] self.f(*self.args, **self.kw) [ 1132.085414] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1132.085414] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] 
raise exceptions.translate_fault(task_info.error) [ 1132.085414] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1132.085414] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] Faults: ['InvalidArgument'] [ 1132.085414] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] [ 1132.085414] env[61962]: INFO nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Terminating instance [ 1132.086923] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.086923] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1132.087218] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-574d968d-d3fe-420d-aaf4-383557d4d088 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.089897] env[61962]: DEBUG nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1132.090044] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1132.090858] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb00136-254c-4f7d-ade8-244f90022687 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.098609] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1132.098849] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1f47847-43d7-4cc1-b527-9a1195693615 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.101118] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1132.101301] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1132.102275] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c730b34d-be82-4299-aeb0-f543464b4557 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.109221] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for the task: (returnval){ [ 1132.109221] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52c69f41-e7da-c337-8766-a9f424f04e6d" [ 1132.109221] env[61962]: _type = "Task" [ 1132.109221] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.122949] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52c69f41-e7da-c337-8766-a9f424f04e6d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.180261] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1132.180594] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1132.180788] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Deleting the datastore file [datastore2] 6996289c-f881-4733-8943-98e3633e43ca {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1132.181088] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dcd65a2-231a-46da-bbec-d89c10b03b7c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.187779] env[61962]: DEBUG oslo_vmware.api [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Waiting for the task: (returnval){ [ 1132.187779] env[61962]: value = "task-4892004" [ 1132.187779] env[61962]: _type = "Task" [ 1132.187779] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.196443] env[61962]: DEBUG oslo_vmware.api [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Task: {'id': task-4892004, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.620332] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1132.620704] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Creating directory with path [datastore2] vmware_temp/dd187ae4-aa78-4188-8c6f-2bd6c3226065/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1132.620951] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80f167f4-7276-4257-a180-6b0cc9f74ec0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.640145] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Created directory with path [datastore2] vmware_temp/dd187ae4-aa78-4188-8c6f-2bd6c3226065/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1132.640502] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Fetch image to [datastore2] vmware_temp/dd187ae4-aa78-4188-8c6f-2bd6c3226065/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1132.640799] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/dd187ae4-aa78-4188-8c6f-2bd6c3226065/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1132.642290] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb1d5cf-7c6e-4afc-98b1-e910f17b10c8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.654021] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f72572-4613-469a-ab94-b02cef896685 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.664687] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5c88c5-ee64-492a-90b7-d02b596758fb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.703388] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53cd40c-7e9d-4b41-be57-a2d3e2b26c65 
{{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.711941] env[61962]: DEBUG oslo_vmware.api [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Task: {'id': task-4892004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076402} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.714033] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1132.714139] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1132.714318] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1132.714520] env[61962]: INFO nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Took 0.62 seconds to destroy the instance on the hypervisor. 
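The InvalidArgument/fileType failure recorded above reaches Nova through oslo.vmware's task polling: the CopyVirtualDisk_Task fails on the vCenter side, and wait_for_task translates task_info.error into the VimFaultException seen in the traceback. A minimal standalone sketch of that path (the endpoint and credentials are hypothetical placeholders, not values from this log):

```python
# Sketch only: how a CopyVirtualDisk_Task fault like the one above surfaces
# via oslo.vmware. Host, user and password are hypothetical placeholders.
from oslo_vmware import api, exceptions

session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                               api_retry_count=3, task_poll_interval=0.5)

def copy_disk(source_vmdk, dest_vmdk, dc_ref):
    """Start a CopyVirtualDisk_Task and block until it completes."""
    vim = session.vim
    task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task',
        vim.service_content.virtualDiskManager,
        sourceName=source_vmdk, sourceDatacenter=dc_ref,
        destName=dest_vmdk, destDatacenter=dc_ref)
    try:
        # wait_for_task polls progress (the "progress is 0%" lines above)
        # and raises VimFaultException when task_info.error is set.
        return session.wait_for_task(task)
    except exceptions.VimFaultException as e:
        # e.fault_list carries the vSphere fault names, e.g. ['InvalidArgument'].
        print('copy failed:', e.fault_list, e)
        raise
```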
[ 1132.716762] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3783cc3d-15d7-40d6-9521-7d07083bc632 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.719019] env[61962]: DEBUG nova.compute.claims [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1132.719246] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.719440] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.746467] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1132.813976] env[61962]: DEBUG oslo_vmware.rw_handles [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dd187ae4-aa78-4188-8c6f-2bd6c3226065/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1132.874537] env[61962]: DEBUG oslo_vmware.rw_handles [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1132.874537] env[61962]: DEBUG oslo_vmware.rw_handles [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dd187ae4-aa78-4188-8c6f-2bd6c3226065/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1133.115975] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c89c48-6b65-4078-a574-4a76312f36d9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.123954] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd07b78f-21e6-4c84-a377-22a79ed8fb38 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.155058] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7dfbb5-df1c-424e-ab3b-d827b861efdb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.163109] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdff30b1-04a2-4877-82ee-268a1c7b37b5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.176566] env[61962]: DEBUG nova.compute.provider_tree [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.185870] env[61962]: DEBUG nova.scheduler.client.report [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1133.199750] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.480s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.200296] env[61962]: ERROR nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1133.200296] env[61962]: Faults: ['InvalidArgument'] [ 1133.200296] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] Traceback (most recent call last): [ 1133.200296] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1133.200296] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] self.driver.spawn(context, instance, image_meta, [ 1133.200296] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1133.200296] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1133.200296] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1133.200296] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] self._fetch_image_if_missing(context, vi) [ 1133.200296] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1133.200296] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] image_cache(vi, tmp_image_ds_loc) [ 1133.200296] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] vm_util.copy_virtual_disk( [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] session._wait_for_task(vmdk_copy_task) [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] return self.wait_for_task(task_ref) [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] return evt.wait() [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] result = hub.switch() [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] return self.greenlet.switch() [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1133.200718] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] self.f(*self.args, **self.kw) [ 1133.201113] env[61962]: ERROR nova.compute.manager [instance: 
6996289c-f881-4733-8943-98e3633e43ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1133.201113] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] raise exceptions.translate_fault(task_info.error) [ 1133.201113] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1133.201113] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] Faults: ['InvalidArgument'] [ 1133.201113] env[61962]: ERROR nova.compute.manager [instance: 6996289c-f881-4733-8943-98e3633e43ca] [ 1133.201113] env[61962]: DEBUG nova.compute.utils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1133.202545] env[61962]: DEBUG nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Build of instance 6996289c-f881-4733-8943-98e3633e43ca was re-scheduled: A specified parameter was not correct: fileType [ 1133.202545] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1133.202919] env[61962]: DEBUG nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1133.203113] env[61962]: DEBUG nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1133.203289] env[61962]: DEBUG nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1133.203456] env[61962]: DEBUG nova.network.neutron [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1133.559646] env[61962]: DEBUG nova.network.neutron [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.574403] env[61962]: INFO nova.compute.manager [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Took 0.37 seconds to deallocate network for instance. [ 1133.671855] env[61962]: INFO nova.scheduler.client.report [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Deleted allocations for instance 6996289c-f881-4733-8943-98e3633e43ca [ 1133.695953] env[61962]: DEBUG oslo_concurrency.lockutils [None req-90db2dbd-038d-42b8-af4c-dd3300a68214 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Lock "6996289c-f881-4733-8943-98e3633e43ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 552.765s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.696539] env[61962]: DEBUG oslo_concurrency.lockutils [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Lock "6996289c-f881-4733-8943-98e3633e43ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 353.983s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.696766] env[61962]: DEBUG oslo_concurrency.lockutils [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Acquiring lock "6996289c-f881-4733-8943-98e3633e43ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.696967] env[61962]: DEBUG oslo_concurrency.lockutils [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Lock 
"6996289c-f881-4733-8943-98e3633e43ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.697148] env[61962]: DEBUG oslo_concurrency.lockutils [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Lock "6996289c-f881-4733-8943-98e3633e43ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.699046] env[61962]: INFO nova.compute.manager [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Terminating instance [ 1133.701075] env[61962]: DEBUG nova.compute.manager [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1133.701282] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1133.701806] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e02e15e-7a6b-462c-b435-e805a1d7209f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.713120] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb334755-8b3f-489c-8244-4bf00af7b075 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.725615] env[61962]: DEBUG nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1133.749721] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6996289c-f881-4733-8943-98e3633e43ca could not be found. 
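The Acquiring/acquired/released lines above, with their waited/held timings, are oslo.concurrency's standard lock instrumentation. An illustrative sketch of the same primitives, reusing the instance UUID from this log for the per-instance events lock (the function bodies are stand-ins):

```python
# Illustrative use of the oslo.concurrency primitives behind the
# "Acquiring lock ... / acquired ... / released" DEBUG lines above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_instance_claim():
    # Runs with the named in-process semaphore held; the lockutils wrapper
    # emits the waited/held duration DEBUG lines seen throughout this log.
    pass

# The per-instance "<uuid>-events" serialization uses the same primitive:
with lockutils.lock('6996289c-f881-4733-8943-98e3633e43ca-events'):
    pass  # clear-events-style critical section

abort_instance_claim()
```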
[ 1133.749721] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1133.749721] env[61962]: INFO nova.compute.manager [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1133.750055] env[61962]: DEBUG oslo.service.loopingcall [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1133.750318] env[61962]: DEBUG nova.compute.manager [-] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1133.750490] env[61962]: DEBUG nova.network.neutron [-] [instance: 6996289c-f881-4733-8943-98e3633e43ca] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1133.786572] env[61962]: DEBUG nova.network.neutron [-] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.797589] env[61962]: INFO nova.compute.manager [-] [instance: 6996289c-f881-4733-8943-98e3633e43ca] Took 0.05 seconds to deallocate network for instance. 
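The "Waiting for function ... _deallocate_network_with_retries to return" line above comes from oslo.service's retry machinery, which Nova wraps around network deallocation. A hedged sketch of that decorator, with a hypothetical flaky_call standing in for the deallocation step:

```python
# Sketch of oslo.service's RetryDecorator as used around network teardown;
# flaky_call and IOError are hypothetical stand-ins.
from oslo_service import loopingcall

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=10, exceptions=(IOError,))
def flaky_call():
    # Retried with increasing sleeps while it raises one of `exceptions`;
    # the decorator logs "Waiting for function ... to return." and gives up
    # (re-raising) once the retry budget is exhausted.
    return 'ok'

print(flaky_call())
```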
[ 1133.805777] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.806550] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.807500] env[61962]: INFO nova.compute.claims [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1133.914442] env[61962]: DEBUG oslo_concurrency.lockutils [None req-067e3123-b7e7-4a03-a777-b5b7270a1ee5 tempest-ServersV294TestFqdnHostnames-1341387601 tempest-ServersV294TestFqdnHostnames-1341387601-project-member] Lock "6996289c-f881-4733-8943-98e3633e43ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.218s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.915671] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "6996289c-f881-4733-8943-98e3633e43ca" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.399s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.915883] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6996289c-f881-4733-8943-98e3633e43ca] During sync_power_state the instance has a pending task (deleting). Skip. 
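The inventory dict logged above is what Placement uses to size this host: usable capacity per resource class is (total - reserved) * allocation_ratio. A worked example with the logged numbers:

```python
# Worked example: usable capacity Placement derives from the inventory
# reported above, i.e. (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0
```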
[ 1133.916084] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "6996289c-f881-4733-8943-98e3633e43ca" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.153471] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e734ec-e3ef-4821-b605-863bd23b2878 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.162856] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ae5709-75eb-4c28-a9d1-a32543826399 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.195324] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa14578a-0324-41de-ba4f-3d2b6f92d411 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.204792] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d1beb6-2a00-461a-89a0-b8e893062c6e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.219707] env[61962]: DEBUG nova.compute.provider_tree [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.228497] env[61962]: DEBUG nova.scheduler.client.report [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1134.241679] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.436s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.242194] env[61962]: DEBUG nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Start building networks asynchronously for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1134.276687] env[61962]: DEBUG nova.compute.utils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1134.278220] env[61962]: DEBUG nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1134.278337] env[61962]: DEBUG nova.network.neutron [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1134.288381] env[61962]: DEBUG nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1134.346080] env[61962]: DEBUG nova.policy [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f592a20771c5443fbade2a3ae05625c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99c377cc8b5545a2b6e712291b14cb04', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1134.361049] env[61962]: DEBUG nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1134.387378] env[61962]: DEBUG nova.virt.hardware [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1134.387626] env[61962]: DEBUG nova.virt.hardware [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1134.387788] env[61962]: DEBUG nova.virt.hardware [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1134.387973] env[61962]: DEBUG nova.virt.hardware [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1134.388136] env[61962]: DEBUG nova.virt.hardware [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1134.388317] env[61962]: DEBUG nova.virt.hardware [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1134.388552] env[61962]: DEBUG nova.virt.hardware [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1134.388815] env[61962]: DEBUG nova.virt.hardware [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1134.388894] env[61962]: DEBUG nova.virt.hardware [None 
req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1134.389086] env[61962]: DEBUG nova.virt.hardware [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1134.389273] env[61962]: DEBUG nova.virt.hardware [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1134.390195] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1ed3e1-9ad3-4f16-81f3-d697bce0a6cb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.399528] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfb4b07-efed-4af7-b625-07be601c1310 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.673928] env[61962]: DEBUG nova.network.neutron [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Successfully created port: bf403321-0ed4-4e4f-a6ac-ca8a05787fb9 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1135.487340] env[61962]: DEBUG nova.network.neutron [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Successfully updated port: bf403321-0ed4-4e4f-a6ac-ca8a05787fb9 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1135.497280] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquiring lock "refresh_cache-5572a1d3-5d2f-4af5-b9f6-ec57506e2069" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.497410] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquired lock "refresh_cache-5572a1d3-5d2f-4af5-b9f6-ec57506e2069" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.497563] env[61962]: DEBUG nova.network.neutron [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1135.541240] env[61962]: DEBUG nova.network.neutron [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 
tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1135.604463] env[61962]: DEBUG nova.compute.manager [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Received event network-vif-plugged-bf403321-0ed4-4e4f-a6ac-ca8a05787fb9 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1135.604657] env[61962]: DEBUG oslo_concurrency.lockutils [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] Acquiring lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.604789] env[61962]: DEBUG oslo_concurrency.lockutils [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] Lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.604967] env[61962]: DEBUG oslo_concurrency.lockutils [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] Lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.605085] env[61962]: DEBUG nova.compute.manager [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] No waiting events found dispatching network-vif-plugged-bf403321-0ed4-4e4f-a6ac-ca8a05787fb9 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1135.605287] env[61962]: WARNING nova.compute.manager [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Received unexpected event network-vif-plugged-bf403321-0ed4-4e4f-a6ac-ca8a05787fb9 for instance with vm_state building and task_state spawning. [ 1135.605546] env[61962]: DEBUG nova.compute.manager [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Received event network-changed-bf403321-0ed4-4e4f-a6ac-ca8a05787fb9 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1135.605796] env[61962]: DEBUG nova.compute.manager [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Refreshing instance network info cache due to event network-changed-bf403321-0ed4-4e4f-a6ac-ca8a05787fb9. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1135.606070] env[61962]: DEBUG oslo_concurrency.lockutils [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] Acquiring lock "refresh_cache-5572a1d3-5d2f-4af5-b9f6-ec57506e2069" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.724583] env[61962]: DEBUG nova.network.neutron [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Updating instance_info_cache with network_info: [{"id": "bf403321-0ed4-4e4f-a6ac-ca8a05787fb9", "address": "fa:16:3e:7c:ec:d1", "network": {"id": "010909f9-53c3-419b-a821-a7528bcad375", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-399791459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c377cc8b5545a2b6e712291b14cb04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf403321-0e", "ovs_interfaceid": "bf403321-0ed4-4e4f-a6ac-ca8a05787fb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.739523] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Releasing lock "refresh_cache-5572a1d3-5d2f-4af5-b9f6-ec57506e2069" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.739847] env[61962]: DEBUG nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Instance network_info: |[{"id": "bf403321-0ed4-4e4f-a6ac-ca8a05787fb9", "address": "fa:16:3e:7c:ec:d1", "network": {"id": "010909f9-53c3-419b-a821-a7528bcad375", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-399791459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c377cc8b5545a2b6e712291b14cb04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tapbf403321-0e", "ovs_interfaceid": "bf403321-0ed4-4e4f-a6ac-ca8a05787fb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1135.740212] env[61962]: DEBUG oslo_concurrency.lockutils [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] Acquired lock "refresh_cache-5572a1d3-5d2f-4af5-b9f6-ec57506e2069" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.740518] env[61962]: DEBUG nova.network.neutron [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Refreshing network info cache for port bf403321-0ed4-4e4f-a6ac-ca8a05787fb9 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1135.741478] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:ec:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf403321-0ed4-4e4f-a6ac-ca8a05787fb9', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1135.749541] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Creating folder: Project (99c377cc8b5545a2b6e712291b14cb04). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1135.752505] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-adbb65e5-8fae-4d7b-a07c-0daac71c1c37 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.766869] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Created folder: Project (99c377cc8b5545a2b6e712291b14cb04) in parent group-v953327. [ 1135.767060] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Creating folder: Instances. Parent ref: group-v953391. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1135.767333] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce979970-becd-425b-92c5-91c3107ae68c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.777201] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Created folder: Instances in parent group-v953391. 
[ 1135.777458] env[61962]: DEBUG oslo.service.loopingcall [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1135.777647] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1135.777856] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6484bf4f-9886-4b38-b1ae-93a6f89d134f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1135.802121] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1135.802121] env[61962]: value = "task-4892007"
[ 1135.802121] env[61962]: _type = "Task"
[ 1135.802121] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1135.810429] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892007, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1136.119099] env[61962]: DEBUG nova.network.neutron [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Updated VIF entry in instance network info cache for port bf403321-0ed4-4e4f-a6ac-ca8a05787fb9. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1136.119634] env[61962]: DEBUG nova.network.neutron [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Updating instance_info_cache with network_info: [{"id": "bf403321-0ed4-4e4f-a6ac-ca8a05787fb9", "address": "fa:16:3e:7c:ec:d1", "network": {"id": "010909f9-53c3-419b-a821-a7528bcad375", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-399791459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c377cc8b5545a2b6e712291b14cb04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf403321-0e", "ovs_interfaceid": "bf403321-0ed4-4e4f-a6ac-ca8a05787fb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1136.130557] env[61962]: DEBUG oslo_concurrency.lockutils [req-9355dd6b-a0cb-4b14-b81f-4f1da53a620f req-aa8c02f7-fe29-412c-b0cc-d2e8bf77796d service nova] Releasing lock "refresh_cache-5572a1d3-5d2f-4af5-b9f6-ec57506e2069" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
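The (returnval){ ... } block above is oslo.vmware's multi-line rendering of the suds task handle; wait_for_task then drives a loopingcall that polls the task until it reaches a terminal state, producing the "progress is N%" and "completed successfully" records. A minimal sketch of that poll loop, assuming a plain time.sleep in place of oslo.service's loopingcall and a get_task_info() stand-in for the PropertyCollector read oslo.vmware actually performs:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Hedged sketch of the wait_for_task/_poll_task pattern in this log.
        # get_task_info() is an assumed stand-in returning an object with
        # .state ('queued'|'running'|'success'|'error'), .result and .error.
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # oslo.vmware raises exceptions.translate_fault(task_info.error)
                # here, which is how the VimFaultException further down arises.
                raise RuntimeError(info.error)
            # Still queued/running: log progress and poll again, yielding
            # records like "Task: {'id': task-4892007, ...} progress is 0%."
            time.sleep(poll_interval)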
[ 1136.314024] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892007, 'name': CreateVM_Task} progress is 25%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1136.813413] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892007, 'name': CreateVM_Task, 'duration_secs': 0.650322} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1136.813776] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1136.814258] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1136.814423] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1136.814733] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1136.814984] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bad7039-3f3b-499b-b300-014b5bc73cd2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1136.819546] env[61962]: DEBUG oslo_vmware.api [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Waiting for the task: (returnval){
[ 1136.819546] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]522f7e6a-f228-c41e-04c6-7dbe62d9eeba"
[ 1136.819546] env[61962]: _type = "Task"
[ 1136.819546] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1136.827023] env[61962]: DEBUG oslo_vmware.api [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]522f7e6a-f228-c41e-04c6-7dbe62d9eeba, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.330492] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.330759] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1137.330971] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.531911] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquiring lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.246123] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquiring lock "c3c36949-754e-4bab-860d-74e0cde4dfd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.246529] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Lock "c3c36949-754e-4bab-860d-74e0cde4dfd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.158167] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1162.169794] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.170009] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.170197] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.170353] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1162.171495] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e71b1e-5a9c-4469-9aad-3f6d6193306f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.180856] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb559ac-3943-4231-b04d-5d325d850f2a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.196452] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2e2681-3b44-4d01-8430-1eba3a71bc01 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.203037] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2bed22-d04f-47a0-9550-b752d5187a7f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.231621] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180548MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1162.231804] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.231984] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.386734] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1162.386932] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1162.387029] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1162.387161] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1162.387280] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1dcafe3b-989b-4290-be54-ceccc62a323f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1162.387420] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1162.387647] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1162.387697] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8ecd73a7-7f23-4994-85b9-479584831687 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1162.387803] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1162.387998] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1162.408015] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b42bffa2-bf04-4287-9ae8-8cbbe107893a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1162.421679] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a47d03b4-71aa-43e9-bfa4-d2a07ea89fb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1162.433470] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2a085633-9439-464b-b0c7-00e68ed46fb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1162.444439] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d970d822-12ab-43cc-8d34-abc6eff8a68d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1162.455860] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 233cd3d2-df58-4826-9bb1-a93167d4cfa4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1162.469951] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 56d0be20-ee15-4c11-afa6-1630ffed7bc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1162.481310] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 7dc97bc2-eaf6-4df0-bd77-d4ee263f3a48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1162.494460] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b3143d91-7fe7-45cf-9b90-c84d989394a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1162.505914] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1162.522104] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1162.532828] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1162.533107] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1162.533273] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '42', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_ed943c69c73d48e2b1b2bab311295d84': '2', 'io_workload': '10', 'num_proj_5481169540de4205a1ff7731132eb3a7': '1', 'num_proj_389fb8d4c2ce47829fe23a6ca60e9bdd': '1', 'num_proj_7738f0dd32484bf9978c5d006d088c3b': '1', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_3285f5bbd10d495e852d0ef0c0047847': '1', 'num_proj_8e79e89d66b44fa092d586334732c0be': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_99c377cc8b5545a2b6e712291b14cb04': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1162.560799] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Refreshing inventories for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}}
[ 1162.583719] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Updating ProviderTree inventory for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}}
[ 1162.583929] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Updating inventory in ProviderTree for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1162.601055] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Refreshing aggregate associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, aggregates: None {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}}
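The inventory records above carry everything needed to reproduce the capacity figures Placement schedules against: for each resource class, schedulable capacity is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A minimal sketch reusing the exact inventory dict logged above (the variable names are illustrative, not Nova's):

    # Capacity math behind the logged inventory; the formula is the
    # standard Placement one.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 96},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:g}, max per allocation={inv['max_unit']}")
    # VCPU: capacity=192 (48 physical vCPUs oversubscribed 4.0x)
    # MEMORY_MB: capacity=196078
    # DISK_GB: capacity=200

This is consistent with the tracker's view above: 10 allocated vCPUs against 48 usable physical ones, far below the 192 schedulable units the 4.0x ratio allows.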
[ 1162.627013] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Refreshing trait associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}}
[ 1162.899988] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc2c5af-4cbb-460b-bab8-ae6da036c07a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1162.908342] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7106c9bb-5c99-471f-bb32-3cbfb21b8fd8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1162.939646] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52db390-7d74-4722-9349-2f0b567d24b1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1162.947899] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227ef76a-6200-4402-bd37-f6dcc4b8c3de {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1162.961932] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1162.971809] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1162.987949] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1162.988160] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.756s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1163.989380] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1163.989380] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1166.155291] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.158454] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.159347] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.159642] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1169.159735] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1169.183023] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1169.183197] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1169.183330] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1169.183459] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1169.183582] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1169.183704] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1169.184040] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1169.184040] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1169.184160] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1169.184235] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1169.184331] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1170.158334] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.159895] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.160286] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.160123] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.196854] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7244c23f-3af8-45ba-bfca-0a8256fd126d tempest-DeleteServersAdminTestJSON-710750285 tempest-DeleteServersAdminTestJSON-710750285-project-member] Acquiring lock "75aed9fe-6947-46b3-9c7d-838bf0f108af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.197755] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7244c23f-3af8-45ba-bfca-0a8256fd126d tempest-DeleteServersAdminTestJSON-710750285 tempest-DeleteServersAdminTestJSON-710750285-project-member] Lock "75aed9fe-6947-46b3-9c7d-838bf0f108af" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.707106] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5bb45e03-0340-4574-a19b-822b205eb001 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "0b089824-a1ec-476a-932a-6cba27a09e4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.707976] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5bb45e03-0340-4574-a19b-822b205eb001 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "0b089824-a1ec-476a-932a-6cba27a09e4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.735325] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5bb45e03-0340-4574-a19b-822b205eb001 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "0fd2335c-e9e9-41bb-b77a-22b6f2c47946" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.735563] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5bb45e03-0340-4574-a19b-822b205eb001 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "0fd2335c-e9e9-41bb-b77a-22b6f2c47946" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.576304] env[61962]: DEBUG oslo_concurrency.lockutils [None req-743cda8f-d191-4576-bac3-fc65868f1692 tempest-ServerShowV247Test-1910617893 tempest-ServerShowV247Test-1910617893-project-member] Acquiring lock "4d2bbdf5-0581-4a85-b962-36686a464bff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.576640] env[61962]: DEBUG oslo_concurrency.lockutils [None req-743cda8f-d191-4576-bac3-fc65868f1692 tempest-ServerShowV247Test-1910617893 tempest-ServerShowV247Test-1910617893-project-member] Lock "4d2bbdf5-0581-4a85-b962-36686a464bff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.576858] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b24f0ed0-aeb8-484d-b3ea-a911986d6121 tempest-ServerShowV247Test-1910617893 tempest-ServerShowV247Test-1910617893-project-member] Acquiring lock "989e57f2-6dac-4734-b895-5724b1c7a06b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.577056] env[61962]: DEBUG 
oslo_concurrency.lockutils [None req-b24f0ed0-aeb8-484d-b3ea-a911986d6121 tempest-ServerShowV247Test-1910617893 tempest-ServerShowV247Test-1910617893-project-member] Lock "989e57f2-6dac-4734-b895-5724b1c7a06b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1178.363041] env[61962]: WARNING oslo_vmware.rw_handles [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1178.363041] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1178.363041] env[61962]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1178.363041] env[61962]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 1178.363041] env[61962]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1178.363041] env[61962]: ERROR oslo_vmware.rw_handles     response.begin()
[ 1178.363041] env[61962]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1178.363041] env[61962]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 1178.363041] env[61962]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1178.363041] env[61962]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 1178.363041] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1178.363041] env[61962]: ERROR oslo_vmware.rw_handles
[ 1178.363611] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/dd187ae4-aa78-4188-8c6f-2bd6c3226065/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1178.365717] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1178.365986] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Copying Virtual Disk [datastore2] vmware_temp/dd187ae4-aa78-4188-8c6f-2bd6c3226065/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/dd187ae4-aa78-4188-8c6f-2bd6c3226065/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1178.366331] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90a38b6a-d262-4907-aeca-207a9b9ec5b9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.377428] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for the task: (returnval){
[ 1178.377428] env[61962]: value = "task-4892008"
[ 1178.377428] env[61962]: _type = "Task"
[ 1178.377428] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1178.386670] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Task: {'id': task-4892008, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1178.889319] env[61962]: DEBUG oslo_vmware.exceptions [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1178.889709] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1178.890386] env[61962]: ERROR nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1178.890386] env[61962]: Faults: ['InvalidArgument']
[ 1178.890386] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Traceback (most recent call last):
[ 1178.890386] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 1178.890386] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     yield resources
[ 1178.890386] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1178.890386] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     self.driver.spawn(context, instance, image_meta,
[ 1178.890386] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1178.890386] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1178.890386] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1178.890386] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     self._fetch_image_if_missing(context, vi)
[ 1178.890386] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     image_cache(vi, tmp_image_ds_loc)
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     vm_util.copy_virtual_disk(
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     session._wait_for_task(vmdk_copy_task)
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     return self.wait_for_task(task_ref)
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     return evt.wait()
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     result = hub.switch()
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1178.890910] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     return self.greenlet.switch()
[ 1178.891391] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1178.891391] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     self.f(*self.args, **self.kw)
[ 1178.891391] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1178.891391] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]     raise exceptions.translate_fault(task_info.error)
[ 1178.891391] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1178.891391] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Faults: ['InvalidArgument']
[ 1178.891391] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]
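When CopyVirtualDisk_Task fails, _poll_task raises whatever exceptions.translate_fault() maps the vSphere fault to; the earlier "Fault InvalidArgument not matched" record shows that no specific exception class matched, so the caller receives a generic VimFaultException carrying the fault names. A minimal sketch of branching on it, assuming only the fault_list attribute visible in the log output above:

    from oslo_vmware import exceptions as vexc

    def classify_spawn_failure(exc):
        # Hedged sketch: decide whether a spawn failure like the one above
        # is worth retrying. fault_list holding names such as
        # 'InvalidArgument' is assumed from the log output, not API docs.
        if isinstance(exc, vexc.VimFaultException):
            if 'InvalidArgument' in (exc.fault_list or []):
                return 'fatal'      # bad request parameter (fileType above)
        return 'retryable'          # e.g. transient session/connection loss

Nova itself takes the fatal path here: the records that follow terminate and clean up the half-built instance rather than retrying the copy.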
[ 1178.891391] env[61962]: INFO nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Terminating instance
[ 1178.892853] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1178.893133] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1178.893805] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1178.894041] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1178.894284] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a67a70f-7486-4c8d-8401-5fa493f1ebc1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.896865] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df71c044-aa6c-48de-bdcc-a2b0729d2bec {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.905087] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1178.905342] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f657910-a547-405f-959a-1831cb9c471d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.908016] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1178.908208] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Folder [datastore2] devstack-image-cache_base created.
{{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1178.909230] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a232f0a-305b-4a98-bb50-0598bccbfbaa {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.914678] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for the task: (returnval){ [ 1178.914678] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52e7b536-4c6a-a50a-a11f-50d568a7c405" [ 1178.914678] env[61962]: _type = "Task" [ 1178.914678] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.923349] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52e7b536-4c6a-a50a-a11f-50d568a7c405, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.998457] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1178.998719] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1178.998940] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Deleting the datastore file [datastore2] 85c6b6af-f1b5-47ca-8e02-61c3669ec02b {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1178.999505] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63180c02-4910-454a-9ca5-1a259123ede1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.007039] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for the task: (returnval){ [ 1179.007039] env[61962]: value = "task-4892010" [ 1179.007039] env[61962]: _type = "Task" [ 1179.007039] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.016201] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Task: {'id': task-4892010, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.426317] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1179.426657] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Creating directory with path [datastore2] vmware_temp/b0341d0c-210f-44d2-821e-697d4feca71b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1179.426962] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc9d1d02-e070-4073-8403-717f22adfac0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.444463] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Created directory with path [datastore2] vmware_temp/b0341d0c-210f-44d2-821e-697d4feca71b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1179.444653] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Fetch image to [datastore2] vmware_temp/b0341d0c-210f-44d2-821e-697d4feca71b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1179.444882] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/b0341d0c-210f-44d2-821e-697d4feca71b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1179.445748] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73575f8c-2030-47f0-aff0-42c47b87086b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.454025] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a2c14b-b51c-465c-a9fe-762119ef3a46 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.465081] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e156876a-fb98-4bf7-b442-2b25724e14c8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.499211] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c91e7f-5ecb-43c4-82c7-bf3848b4d346 
{{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.506487] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c31c39df-d443-479d-8902-677bd803a2a2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.517612] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Task: {'id': task-4892010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.421887} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.517922] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1179.518246] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1179.518519] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1179.518769] env[61962]: INFO nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1179.521230] env[61962]: DEBUG nova.compute.claims [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1179.521396] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1179.521605] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1179.539380] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1179.746770] env[61962]: DEBUG oslo_vmware.rw_handles [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b0341d0c-210f-44d2-821e-697d4feca71b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1179.806670] env[61962]: DEBUG oslo_vmware.rw_handles [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1179.806914] env[61962]: DEBUG oslo_vmware.rw_handles [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b0341d0c-210f-44d2-821e-697d4feca71b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1180.017923] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d111bd4-11df-4508-b849-31c936dea2f8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1180.026135] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebaa887-5495-43a8-9e46-58edebeefbac {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1180.057056] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ef5744-0c98-4ce0-8538-e16ff7d5bab1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1180.065313] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495e927d-787a-4ab0-b4d3-1b4b164ed353 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1180.080607] env[61962]: DEBUG nova.compute.provider_tree [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1180.089218] env[61962]: DEBUG nova.scheduler.client.report [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1180.105670] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.584s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1180.106324] env[61962]: ERROR nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1180.106324] env[61962]: Faults: ['InvalidArgument']
[ 1180.106324] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Traceback (most recent call last):
[ 1180.106324] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1180.106324] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] self.driver.spawn(context, instance, image_meta,
[ 1180.106324] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1180.106324] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1180.106324] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1180.106324] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] self._fetch_image_if_missing(context, vi)
[ 1180.106324] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1180.106324] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] image_cache(vi, tmp_image_ds_loc)
[ 1180.106324] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] vm_util.copy_virtual_disk(
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] session._wait_for_task(vmdk_copy_task)
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] return self.wait_for_task(task_ref)
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] return evt.wait()
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] result = hub.switch()
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] return self.greenlet.switch()
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1180.107433] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] self.f(*self.args, **self.kw)
[ 1180.107786] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1180.107786] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] raise exceptions.translate_fault(task_info.error)
[ 1180.107786] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1180.107786] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Faults: ['InvalidArgument']
[ 1180.107786] env[61962]: ERROR nova.compute.manager [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b]
[ 1180.107786] env[61962]: DEBUG nova.compute.utils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1180.108781] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Build of instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b was re-scheduled: A specified parameter was not correct: fileType
[ 1180.108781] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1180.109211] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1180.109418] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1180.109626] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1180.109867] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1180.797075] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1180.838734] env[61962]: INFO nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Took 0.73 seconds to deallocate network for instance.
[ 1181.005933] env[61962]: INFO nova.scheduler.client.report [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Deleted allocations for instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b
[ 1181.031984] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 598.865s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1181.033203] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 398.093s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1181.033447] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1181.033653] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1181.033817] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1181.036255] env[61962]: INFO nova.compute.manager [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Terminating instance
[ 1181.038545] env[61962]: DEBUG nova.compute.manager [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1181.038743] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1181.039033] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bfe28386-dc6f-483b-8c68-68d331b56e76 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1181.049704] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45589f08-3506-47aa-aea9-607f9833b130 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1181.061467] env[61962]: DEBUG nova.compute.manager [None req-091a91b0-398d-4b28-8365-8b8cb4b8e66d tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: 98c88a0d-07d4-4bec-8722-0afe732886ac] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1181.089040] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 85c6b6af-f1b5-47ca-8e02-61c3669ec02b could not be found.
[ 1181.089298] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1181.089524] env[61962]: INFO nova.compute.manager [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1181.089841] env[61962]: DEBUG oslo.service.loopingcall [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1181.090271] env[61962]: DEBUG nova.compute.manager [-] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1181.090427] env[61962]: DEBUG nova.network.neutron [-] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1181.096569] env[61962]: DEBUG nova.compute.manager [None req-091a91b0-398d-4b28-8365-8b8cb4b8e66d tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] [instance: 98c88a0d-07d4-4bec-8722-0afe732886ac] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1181.132317] env[61962]: DEBUG oslo_concurrency.lockutils [None req-091a91b0-398d-4b28-8365-8b8cb4b8e66d tempest-MigrationsAdminTest-5656953 tempest-MigrationsAdminTest-5656953-project-member] Lock "98c88a0d-07d4-4bec-8722-0afe732886ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 225.227s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1181.137257] env[61962]: DEBUG nova.network.neutron [-] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1181.144038] env[61962]: INFO nova.compute.manager [-] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] Took 0.05 seconds to deallocate network for instance.
[ 1181.144415] env[61962]: DEBUG nova.compute.manager [None req-5481884c-a19d-4a9d-9904-b78ff198a034 tempest-ServerGroupTestJSON-904706614 tempest-ServerGroupTestJSON-904706614-project-member] [instance: b42bffa2-bf04-4287-9ae8-8cbbe107893a] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1181.178791] env[61962]: DEBUG nova.compute.manager [None req-5481884c-a19d-4a9d-9904-b78ff198a034 tempest-ServerGroupTestJSON-904706614 tempest-ServerGroupTestJSON-904706614-project-member] [instance: b42bffa2-bf04-4287-9ae8-8cbbe107893a] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1181.210032] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5481884c-a19d-4a9d-9904-b78ff198a034 tempest-ServerGroupTestJSON-904706614 tempest-ServerGroupTestJSON-904706614-project-member] Lock "b42bffa2-bf04-4287-9ae8-8cbbe107893a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.873s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1181.222931] env[61962]: DEBUG nova.compute.manager [None req-e0071317-e8e9-4b26-bfd6-1ee93b62c913 tempest-DeleteServersAdminTestJSON-710750285 tempest-DeleteServersAdminTestJSON-710750285-project-member] [instance: a47d03b4-71aa-43e9-bfa4-d2a07ea89fb8] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1181.259179] env[61962]: DEBUG nova.compute.manager [None req-e0071317-e8e9-4b26-bfd6-1ee93b62c913 tempest-DeleteServersAdminTestJSON-710750285 tempest-DeleteServersAdminTestJSON-710750285-project-member] [instance: a47d03b4-71aa-43e9-bfa4-d2a07ea89fb8] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1181.282723] env[61962]: DEBUG oslo_concurrency.lockutils [None req-21467c86-d0f5-467b-bc6c-783c4ad26f01 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.249s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1181.284415] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 50.768s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1181.284611] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85c6b6af-f1b5-47ca-8e02-61c3669ec02b] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1181.284906] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "85c6b6af-f1b5-47ca-8e02-61c3669ec02b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1181.287185] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e0071317-e8e9-4b26-bfd6-1ee93b62c913 tempest-DeleteServersAdminTestJSON-710750285 tempest-DeleteServersAdminTestJSON-710750285-project-member] Lock "a47d03b4-71aa-43e9-bfa4-d2a07ea89fb8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 202.640s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1181.300065] env[61962]: DEBUG nova.compute.manager [None req-557bce10-54a3-4a5d-b0cf-bd9d351fb139 tempest-ServerAddressesTestJSON-194346181 tempest-ServerAddressesTestJSON-194346181-project-member] [instance: 2a085633-9439-464b-b0c7-00e68ed46fb3] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1181.351761] env[61962]: DEBUG nova.compute.manager [None req-557bce10-54a3-4a5d-b0cf-bd9d351fb139 tempest-ServerAddressesTestJSON-194346181 tempest-ServerAddressesTestJSON-194346181-project-member] [instance: 2a085633-9439-464b-b0c7-00e68ed46fb3] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1181.382514] env[61962]: DEBUG oslo_concurrency.lockutils [None req-557bce10-54a3-4a5d-b0cf-bd9d351fb139 tempest-ServerAddressesTestJSON-194346181 tempest-ServerAddressesTestJSON-194346181-project-member] Lock "2a085633-9439-464b-b0c7-00e68ed46fb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 197.772s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1181.394576] env[61962]: DEBUG nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1181.466019] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1181.466019] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1181.466019] env[61962]: INFO nova.compute.claims [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1181.859436] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b6487d-854b-43d2-9156-19f8976d13cd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1181.868630] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7e6a0c-2d91-46b4-bd7a-6ddf5635a2b2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1181.907296] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f233ad-0e42-4ee0-accf-96da1ce59e68 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1181.916080] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b836560-f338-4d84-9b12-f1f2f9a52ad0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1181.931759] env[61962]: DEBUG nova.compute.provider_tree [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1181.944578] env[61962]: DEBUG nova.scheduler.client.report [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1181.965470] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.501s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1181.965970] env[61962]: DEBUG nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1182.013124] env[61962]: DEBUG nova.compute.utils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1182.013824] env[61962]: DEBUG nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Not allocating networking since 'none' was specified. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}}
[ 1182.028816] env[61962]: DEBUG nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1182.119010] env[61962]: DEBUG nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1182.155612] env[61962]: DEBUG nova.virt.hardware [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=<?>,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-01T12:09:08Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1182.155612] env[61962]: DEBUG nova.virt.hardware [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1182.155803] env[61962]: DEBUG nova.virt.hardware [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1182.156154] env[61962]: DEBUG nova.virt.hardware [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1182.156360] env[61962]: DEBUG nova.virt.hardware [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1182.156515] env[61962]: DEBUG nova.virt.hardware [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1182.156733] env[61962]: DEBUG nova.virt.hardware [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1182.156915] env[61962]: DEBUG nova.virt.hardware [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1182.157105] env[61962]: DEBUG nova.virt.hardware [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1182.157230] env[61962]: DEBUG nova.virt.hardware [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1182.157452] env[61962]: DEBUG nova.virt.hardware [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1182.158645] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6ecc39-60ca-4f4c-80fd-8ea0e485c80a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.167671] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8bf326-cd9d-4c8c-9313-9487ce6b0f88 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.182868] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Instance VIF info [] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1182.188720] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Creating folder: Project (696cc1bc78cb4511a76f788d82895fef). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1182.189067] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6814ef30-1942-4755-b98f-e35879c155ab {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.201069] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Created folder: Project (696cc1bc78cb4511a76f788d82895fef) in parent group-v953327.
[ 1182.201293] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Creating folder: Instances. Parent ref: group-v953394. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1182.201538] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa51c31b-dd93-412d-a877-189b518823d2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.212150] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Created folder: Instances in parent group-v953394.
[ 1182.212351] env[61962]: DEBUG oslo.service.loopingcall [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1182.212561] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1182.212786] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d140198-522b-447f-ac3e-3e0340abe901 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.230887] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1182.230887] env[61962]: value = "task-4892013"
[ 1182.230887] env[61962]: _type = "Task"
[ 1182.230887] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1182.241015] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892013, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1182.749700] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892013, 'name': CreateVM_Task, 'duration_secs': 0.346389} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1182.749700] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1182.749700] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1182.750404] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1182.751092] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1182.751865] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c463c558-ca5c-4b72-b975-a715fe127de8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.758693] env[61962]: DEBUG oslo_vmware.api [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Waiting for the task: (returnval){
[ 1182.758693] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52a2c40c-71bb-e51e-3322-41bf69f2dcd9"
[ 1182.758693] env[61962]: _type = "Task"
[ 1182.758693] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1182.775431] env[61962]: DEBUG oslo_vmware.api [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52a2c40c-71bb-e51e-3322-41bf69f2dcd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1183.272807] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1183.273096] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1183.273341] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1194.069161] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1194.069457] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1195.412367] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquiring lock "d970d822-12ab-43cc-8d34-abc6eff8a68d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1197.810714] env[61962]: DEBUG oslo_concurrency.lockutils [None req-792ae9a9-0dda-4d94-b11f-0fec504684c6 tempest-ServerRescueNegativeTestJSON-1391422182 tempest-ServerRescueNegativeTestJSON-1391422182-project-member] Acquiring lock "2d9a9155-1dbb-4984-99dd-d40552eef7db" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1197.811095] env[61962]: DEBUG oslo_concurrency.lockutils [None req-792ae9a9-0dda-4d94-b11f-0fec504684c6 tempest-ServerRescueNegativeTestJSON-1391422182 tempest-ServerRescueNegativeTestJSON-1391422182-project-member] Lock "2d9a9155-1dbb-4984-99dd-d40552eef7db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1198.280355] env[61962]: DEBUG oslo_concurrency.lockutils [None req-59aee293-c4f4-48db-95b3-8f2c81fcf69f tempest-ServerRescueNegativeTestJSON-1391422182 tempest-ServerRescueNegativeTestJSON-1391422182-project-member] Acquiring lock "a2209177-23ac-4367-9736-2978ffd6fb39" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1198.280591] env[61962]: DEBUG oslo_concurrency.lockutils [None req-59aee293-c4f4-48db-95b3-8f2c81fcf69f tempest-ServerRescueNegativeTestJSON-1391422182 tempest-ServerRescueNegativeTestJSON-1391422182-project-member] Lock "a2209177-23ac-4367-9736-2978ffd6fb39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1210.509568] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9eb76157-2b61-4f0b-aec0-84ea1a13fdbd tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquiring lock "5b261fba-bbdf-4ba9-a065-e833f6e44ba2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1210.509780] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9eb76157-2b61-4f0b-aec0-84ea1a13fdbd tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "5b261fba-bbdf-4ba9-a065-e833f6e44ba2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1221.930941] env[61962]: DEBUG oslo_concurrency.lockutils [None req-daf488bf-6beb-4599-adaa-f153a1040e49 tempest-InstanceActionsNegativeTestJSON-360330678 tempest-InstanceActionsNegativeTestJSON-360330678-project-member] Acquiring lock "4453ec6d-57f7-4ae1-bafa-4dd993c22d03" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1221.931386] env[61962]: DEBUG oslo_concurrency.lockutils [None req-daf488bf-6beb-4599-adaa-f153a1040e49 tempest-InstanceActionsNegativeTestJSON-360330678 tempest-InstanceActionsNegativeTestJSON-360330678-project-member] Lock "4453ec6d-57f7-4ae1-bafa-4dd993c22d03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1222.163009] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1222.178451] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1222.178451] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1222.178451] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1222.178451] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1222.178973] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b264140-dc45-443d-8652-48aa1d604fa1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1222.191123] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-418a98e2-d3f0-47d1-b386-2d0f8d6540e4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1222.210026] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c88903c-36eb-43e4-a744-4441ab0d8344 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1222.217148] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d0111c-ccd7-4716-9650-06d7f5b80818 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1222.255071] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180572MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1222.255230] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1222.255386] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1222.343681] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1222.343853] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1222.343983] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1222.344124] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1dcafe3b-989b-4290-be54-ceccc62a323f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1222.344248] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1222.344368] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1222.344487] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8ecd73a7-7f23-4994-85b9-479584831687 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1222.344601] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1222.344715] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1222.344829] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d970d822-12ab-43cc-8d34-abc6eff8a68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1222.356888] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance b3143d91-7fe7-45cf-9b90-c84d989394a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.370738] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.383050] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.395508] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.409135] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 75aed9fe-6947-46b3-9c7d-838bf0f108af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.421351] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0b089824-a1ec-476a-932a-6cba27a09e4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.436318] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0fd2335c-e9e9-41bb-b77a-22b6f2c47946 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.450298] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4d2bbdf5-0581-4a85-b962-36686a464bff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.474106] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 989e57f2-6dac-4734-b895-5724b1c7a06b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.516179] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.530412] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2d9a9155-1dbb-4984-99dd-d40552eef7db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.546022] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a2209177-23ac-4367-9736-2978ffd6fb39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.563638] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5b261fba-bbdf-4ba9-a065-e833f6e44ba2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.576616] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4453ec6d-57f7-4ae1-bafa-4dd993c22d03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.576873] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1222.577051] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '47', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_ed943c69c73d48e2b1b2bab311295d84': '1', 'io_workload': '10', 'num_proj_5481169540de4205a1ff7731132eb3a7': '1', 'num_proj_389fb8d4c2ce47829fe23a6ca60e9bdd': '1', 'num_proj_7738f0dd32484bf9978c5d006d088c3b': '1', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_3285f5bbd10d495e852d0ef0c0047847': '1', 'num_proj_8e79e89d66b44fa092d586334732c0be': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_99c377cc8b5545a2b6e712291b14cb04': '1', 'num_proj_696cc1bc78cb4511a76f788d82895fef': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1223.051024] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f503cbc-d114-4d87-b001-756412bd17df {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.059921] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def1be4a-f7ff-4c3d-996d-9a2351c4aa2f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.090490] env[61962]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3ea2ae-19ba-4263-9451-847300704204 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.100702] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad8d4e7-1b18-4c2b-8edb-397ff1d374fd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.118382] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1223.127718] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1223.148854] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1223.148982] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.894s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.146304] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.146633] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1225.213295] env[61962]: DEBUG oslo_concurrency.lockutils [None req-804cf4ae-423f-40bf-ba35-ecefab5e1417 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Acquiring lock "be426e8c-aac5-4510-a9e2-20cc79a00cae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.213670] env[61962]: DEBUG oslo_concurrency.lockutils [None req-804cf4ae-423f-40bf-ba35-ecefab5e1417 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Lock "be426e8c-aac5-4510-a9e2-20cc79a00cae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.154362] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.701078] env[61962]: WARNING oslo_vmware.rw_handles [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1226.701078] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1226.701078] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1226.701078] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1226.701078] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1226.701078] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1226.701078] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1226.701078] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1226.701078] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1226.701078] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1226.701078] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1226.701078] env[61962]: ERROR oslo_vmware.rw_handles [ 1226.701853] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/b0341d0c-210f-44d2-821e-697d4feca71b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1226.703634] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 
tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1226.703887] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Copying Virtual Disk [datastore2] vmware_temp/b0341d0c-210f-44d2-821e-697d4feca71b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/b0341d0c-210f-44d2-821e-697d4feca71b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1226.705571] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-288fd466-4b98-439b-bcbb-501e574f838b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.714467] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for the task: (returnval){ [ 1226.714467] env[61962]: value = "task-4892014" [ 1226.714467] env[61962]: _type = "Task" [ 1226.714467] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.725788] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Task: {'id': task-4892014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.153530] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.225830] env[61962]: DEBUG oslo_vmware.exceptions [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1227.226449] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1227.227189] env[61962]: ERROR nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1227.227189] env[61962]: Faults: ['InvalidArgument'] [ 1227.227189] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Traceback (most recent call last): [ 1227.227189] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1227.227189] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] yield resources [ 1227.227189] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1227.227189] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] self.driver.spawn(context, instance, image_meta, [ 1227.227189] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1227.227189] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1227.227189] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1227.227189] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] self._fetch_image_if_missing(context, vi) [ 1227.227189] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] image_cache(vi, tmp_image_ds_loc) [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] vm_util.copy_virtual_disk( [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] session._wait_for_task(vmdk_copy_task) [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] return self.wait_for_task(task_ref) [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] return evt.wait() [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] result = hub.switch() [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1227.230223] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] return self.greenlet.switch() [ 1227.230632] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1227.230632] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] self.f(*self.args, **self.kw) [ 1227.230632] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1227.230632] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] raise exceptions.translate_fault(task_info.error) [ 1227.230632] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1227.230632] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Faults: ['InvalidArgument'] [ 1227.230632] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] [ 1227.230632] env[61962]: INFO nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Terminating instance [ 1227.230632] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.230882] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1227.231746] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 
tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1227.232231] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1227.232618] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d841a4dd-ae15-4477-9764-136aff1f9158 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.238652] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d150d6d-b77d-41e4-a8fc-e4f693bd8fbe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.244193] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1227.244193] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3e51389-64ab-4f28-82be-643d8a1cf87d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.246730] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1227.247101] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1227.250568] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90722000-663f-42ae-9a2c-0e777427539d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.257019] env[61962]: DEBUG oslo_vmware.api [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Waiting for the task: (returnval){ [ 1227.257019] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]522b4015-dcf9-75eb-48ff-104169e6943f" [ 1227.257019] env[61962]: _type = "Task" [ 1227.257019] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.262194] env[61962]: DEBUG oslo_vmware.api [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]522b4015-dcf9-75eb-48ff-104169e6943f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.324656] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1227.325155] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1227.325562] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Deleting the datastore file [datastore2] 0aa340f7-a219-47f5-9d7d-dd4062643cf7 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1227.327021] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-36c5db2d-5dd8-4087-94e8-d36b4dd04cf8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.333502] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for the task: (returnval){ [ 1227.333502] env[61962]: value = "task-4892016" [ 1227.333502] env[61962]: _type = "Task" [ 1227.333502] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.343753] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Task: {'id': task-4892016, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.764761] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1227.765044] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Creating directory with path [datastore2] vmware_temp/81bd7d81-b59d-4f28-9152-5869debf50a2/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1227.765658] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f7c86db-6ad4-4d4a-a0ab-171c0078c12c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.777710] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Created directory with path [datastore2] vmware_temp/81bd7d81-b59d-4f28-9152-5869debf50a2/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1227.778022] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Fetch image to [datastore2] vmware_temp/81bd7d81-b59d-4f28-9152-5869debf50a2/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1227.778148] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/81bd7d81-b59d-4f28-9152-5869debf50a2/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1227.779265] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e102ba9-5d7b-4232-ba52-0089d4b19561 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.786900] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cf8d4a-be40-49b3-8b25-e8ec583cd894 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.797465] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b61ecc-d6d5-4d77-9616-c120fa00cb83 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.831386] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788147a7-03bc-426f-ba74-e091171a3c64 {{(pid=61962) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.843983] env[61962]: DEBUG oslo_vmware.api [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Task: {'id': task-4892016, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075782} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.844585] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1227.844801] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1227.844982] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1227.845171] env[61962]: INFO nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Took 0.61 seconds to destroy the instance on the hypervisor. 
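[editor's note] The "Waiting for the task: (returnval){ ... } to complete" / "progress is 0%" / "completed successfully" triples above are oslo.vmware's task-polling loop around vCenter tasks such as CopyVirtualDisk_Task and DeleteDatastoreFile_Task. A minimal illustrative sketch of that pattern follows; it is not the library's code, and session.get_task_info, TaskFaultError, and POLL_INTERVAL are hypothetical stand-ins:

    import time

    POLL_INTERVAL = 0.5  # assumed polling period; the real interval is configurable

    class TaskFaultError(Exception):
        """Stand-in for the translated VIM fault raised when a task errors."""

    def wait_for_task(session, task_ref):
        """Poll a vCenter task reference until it reaches a terminal state."""
        while True:
            info = session.get_task_info(task_ref)  # hypothetical accessor
            if info.state == "success":
                return info.result
            if info.state == "error":
                # corresponds to raising translate_fault(task_info.error),
                # the path taken when CopyVirtualDisk_Task fails in this log
                raise TaskFaultError(info.error)
            # still queued/running: report progress and retry
            print(f"Task {task_ref} progress is {info.progress or 0}%")
            time.sleep(POLL_INTERVAL)

Under this reading, the InvalidArgument entries later in the section are simply this loop surfacing the task's error state as a Python exception.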
[ 1227.846760] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1cf00dd0-8be7-46ad-8a86-11eacfca5180 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.848929] env[61962]: DEBUG nova.compute.claims [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1227.849113] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.849328] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.874787] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1228.092433] env[61962]: DEBUG oslo_vmware.rw_handles [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/81bd7d81-b59d-4f28-9152-5869debf50a2/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1228.157658] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.160275] env[61962]: DEBUG oslo_vmware.rw_handles [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1228.160717] env[61962]: DEBUG oslo_vmware.rw_handles [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/81bd7d81-b59d-4f28-9152-5869debf50a2/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1228.372232] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9593f77-ff2b-4316-850d-14b6e4a756a3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.382418] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056f19f6-a736-4d9c-bbad-c376e4350961 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.419378] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a819e33a-5328-4ece-868c-de03d5575f96 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.427177] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd742c6-5747-43c2-84c6-0d4deea21784 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.443193] env[61962]: DEBUG nova.compute.provider_tree [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.458502] env[61962]: DEBUG nova.scheduler.client.report [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1228.475839] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.626s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.476461] env[61962]: ERROR nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1228.476461] env[61962]: Faults: ['InvalidArgument'] [ 1228.476461] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Traceback (most recent call last): [ 1228.476461] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1228.476461] env[61962]: ERROR 
nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] self.driver.spawn(context, instance, image_meta, [ 1228.476461] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1228.476461] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1228.476461] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1228.476461] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] self._fetch_image_if_missing(context, vi) [ 1228.476461] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1228.476461] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] image_cache(vi, tmp_image_ds_loc) [ 1228.476461] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] vm_util.copy_virtual_disk( [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] session._wait_for_task(vmdk_copy_task) [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] return self.wait_for_task(task_ref) [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] return evt.wait() [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] result = hub.switch() [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] return self.greenlet.switch() [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1228.477278] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] self.f(*self.args, **self.kw) [ 1228.477841] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1228.477841] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] raise exceptions.translate_fault(task_info.error) [ 1228.477841] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1228.477841] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Faults: ['InvalidArgument'] [ 1228.477841] env[61962]: ERROR nova.compute.manager [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] [ 1228.477841] env[61962]: DEBUG nova.compute.utils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1228.479698] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Build of instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 was re-scheduled: A specified parameter was not correct: fileType [ 1228.479698] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1228.480449] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1228.480449] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1228.480608] env[61962]: DEBUG nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1228.480652] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1228.549746] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e8b3351b-9bef-4d51-80fa-bf03ad584b12 tempest-ServersNegativeTestJSON-1349715905 tempest-ServersNegativeTestJSON-1349715905-project-member] Acquiring lock "e62eace3-3320-441e-becb-abff7cdd6660" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.551018] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e8b3351b-9bef-4d51-80fa-bf03ad584b12 tempest-ServersNegativeTestJSON-1349715905 tempest-ServersNegativeTestJSON-1349715905-project-member] Lock "e62eace3-3320-441e-becb-abff7cdd6660" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.931440] env[61962]: DEBUG nova.network.neutron [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.974604] env[61962]: INFO nova.compute.manager [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Took 0.49 seconds to deallocate network for instance. 
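[editor's note] The sequence above (spawn raises VimFaultException, the resource claim is aborted, the build is re-scheduled, VIFs/network are cleaned up, and the placement allocations are deleted just below) is the standard build-failure path, serialized per instance UUID by the _locked_do_build_and_run_instance lock entries. A compressed, hypothetical sketch of that control flow, assuming simplified manager helpers (abort_claim, cleanup_networks, delete_allocations are stand-ins, not Nova's actual methods):

    from enum import Enum

    class BuildResult(Enum):
        ACTIVE = "active"            # build succeeded on this host
        RESCHEDULED = "rescheduled"  # failed here; the scheduler may retry elsewhere

    def locked_do_build_and_run_instance(manager, context, instance):
        # Runs with the per-instance lock held, so at most one build per UUID,
        # matching the `Lock "<uuid>" acquired by ...` entries in this log.
        try:
            manager.driver.spawn(context, instance)  # may raise a virt-layer fault
            return BuildResult.ACTIVE
        except Exception:
            # Mirror the cleanup order the log shows: release the resource
            # claim, deallocate networking, drop the placement allocations,
            # then hand the request back for re-scheduling.
            manager.abort_claim(instance)
            manager.cleanup_networks(context, instance)
            manager.delete_allocations(instance)
            return BuildResult.RESCHEDULED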
[ 1229.091023] env[61962]: INFO nova.scheduler.client.report [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Deleted allocations for instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 [ 1229.115156] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2457412b-4345-46bb-8773-263f9b3b3e38 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 646.886s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.116370] env[61962]: DEBUG oslo_concurrency.lockutils [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 446.602s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.116584] env[61962]: DEBUG oslo_concurrency.lockutils [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Acquiring lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.116923] env[61962]: DEBUG oslo_concurrency.lockutils [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.116923] env[61962]: DEBUG oslo_concurrency.lockutils [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.119192] env[61962]: INFO nova.compute.manager [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Terminating instance [ 1229.120944] env[61962]: DEBUG nova.compute.manager [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1229.121148] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1229.121679] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf84740a-2215-4ab5-853a-38e1042c82d8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.134306] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837b5e27-902b-47a2-8ece-0de536c9baed {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.145902] env[61962]: DEBUG nova.compute.manager [None req-a1dcb04c-7139-44cd-9826-36b1b040ba70 tempest-SecurityGroupsTestJSON-26720826 tempest-SecurityGroupsTestJSON-26720826-project-member] [instance: 233cd3d2-df58-4826-9bb1-a93167d4cfa4] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1229.170073] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0aa340f7-a219-47f5-9d7d-dd4062643cf7 could not be found. [ 1229.170307] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1229.170563] env[61962]: INFO nova.compute.manager [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1229.170776] env[61962]: DEBUG oslo.service.loopingcall [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1229.171042] env[61962]: DEBUG nova.compute.manager [-] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1229.171150] env[61962]: DEBUG nova.network.neutron [-] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1229.173629] env[61962]: DEBUG nova.compute.manager [None req-a1dcb04c-7139-44cd-9826-36b1b040ba70 tempest-SecurityGroupsTestJSON-26720826 tempest-SecurityGroupsTestJSON-26720826-project-member] [instance: 233cd3d2-df58-4826-9bb1-a93167d4cfa4] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1229.200895] env[61962]: DEBUG nova.network.neutron [-] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.205224] env[61962]: DEBUG oslo_concurrency.lockutils [None req-a1dcb04c-7139-44cd-9826-36b1b040ba70 tempest-SecurityGroupsTestJSON-26720826 tempest-SecurityGroupsTestJSON-26720826-project-member] Lock "233cd3d2-df58-4826-9bb1-a93167d4cfa4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.412s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.213463] env[61962]: INFO nova.compute.manager [-] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] Took 0.04 seconds to deallocate network for instance. [ 1229.218486] env[61962]: DEBUG nova.compute.manager [None req-0c587f3b-20e0-453c-8616-e07485ee1c45 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 56d0be20-ee15-4c11-afa6-1630ffed7bc4] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1229.251492] env[61962]: DEBUG nova.compute.manager [None req-0c587f3b-20e0-453c-8616-e07485ee1c45 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 56d0be20-ee15-4c11-afa6-1630ffed7bc4] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1229.277337] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0c587f3b-20e0-453c-8616-e07485ee1c45 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "56d0be20-ee15-4c11-afa6-1630ffed7bc4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.055s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.303938] env[61962]: DEBUG nova.compute.manager [None req-9973fa98-c8f3-49d0-a7b2-f13de3c68c98 tempest-ServerActionsTestOtherB-2118315702 tempest-ServerActionsTestOtherB-2118315702-project-member] [instance: 7dc97bc2-eaf6-4df0-bd77-d4ee263f3a48] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1229.343626] env[61962]: DEBUG nova.compute.manager [None req-9973fa98-c8f3-49d0-a7b2-f13de3c68c98 tempest-ServerActionsTestOtherB-2118315702 tempest-ServerActionsTestOtherB-2118315702-project-member] [instance: 7dc97bc2-eaf6-4df0-bd77-d4ee263f3a48] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1229.375628] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9973fa98-c8f3-49d0-a7b2-f13de3c68c98 tempest-ServerActionsTestOtherB-2118315702 tempest-ServerActionsTestOtherB-2118315702-project-member] Lock "7dc97bc2-eaf6-4df0-bd77-d4ee263f3a48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.514s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.377313] env[61962]: DEBUG oslo_concurrency.lockutils [None req-be4f8e28-478c-4391-80f6-60642ffadc1e tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.261s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.378110] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 98.861s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.378332] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 0aa340f7-a219-47f5-9d7d-dd4062643cf7] During sync_power_state the instance has a pending task (deleting). Skip. [ 1229.378519] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "0aa340f7-a219-47f5-9d7d-dd4062643cf7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.388436] env[61962]: DEBUG nova.compute.manager [None req-dfadb1ab-d2d4-4747-a921-c6338719a239 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] [instance: b3143d91-7fe7-45cf-9b90-c84d989394a3] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1229.416377] env[61962]: DEBUG nova.compute.manager [None req-dfadb1ab-d2d4-4747-a921-c6338719a239 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] [instance: b3143d91-7fe7-45cf-9b90-c84d989394a3] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1229.442209] env[61962]: DEBUG oslo_concurrency.lockutils [None req-dfadb1ab-d2d4-4747-a921-c6338719a239 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Lock "b3143d91-7fe7-45cf-9b90-c84d989394a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.250s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.452093] env[61962]: DEBUG nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1229.513502] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.513712] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.515322] env[61962]: INFO nova.compute.claims [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1229.904771] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04844c58-7e09-4ef3-91cd-a28c4632dd9b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.912806] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c07ee2-3045-4e25-9ece-8ffd8338f930 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.944364] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0323432-7740-4988-9e74-ee60859e4a8c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.952502] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c4d891-d229-4dc3-9232-b984485caa9f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.965767] env[61962]: DEBUG nova.compute.provider_tree [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1229.976020] env[61962]: DEBUG nova.scheduler.client.report [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1229.992317] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.478s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.992820] env[61962]: DEBUG nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1230.041300] env[61962]: DEBUG nova.compute.utils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1230.042893] env[61962]: DEBUG nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1230.043074] env[61962]: DEBUG nova.network.neutron [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1230.052479] env[61962]: DEBUG nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Start building block device mappings for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1230.103349] env[61962]: DEBUG nova.policy [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e492e5ff044a45f4831b5a4a1ccf747c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9c2add945d24a299e5bb0fe543deec3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1230.122073] env[61962]: DEBUG nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1230.153763] env[61962]: DEBUG nova.virt.hardware [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1230.154016] env[61962]: DEBUG nova.virt.hardware [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1230.154180] env[61962]: DEBUG nova.virt.hardware [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1230.154369] env[61962]: DEBUG nova.virt.hardware [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1230.154656] env[61962]: DEBUG nova.virt.hardware [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1230.155788] env[61962]: DEBUG nova.virt.hardware [None 
req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1230.155788] env[61962]: DEBUG nova.virt.hardware [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1230.155788] env[61962]: DEBUG nova.virt.hardware [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1230.155788] env[61962]: DEBUG nova.virt.hardware [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1230.155788] env[61962]: DEBUG nova.virt.hardware [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1230.156018] env[61962]: DEBUG nova.virt.hardware [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1230.156637] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b422cdb-0b75-4f9b-b48d-f4ebf0a43b87 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.159923] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.160092] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1230.160216] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1230.168213] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c67ea46-5313-4b16-8e59-567aa2581b65 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.188392] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] 
Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1230.188589] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1230.188699] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1230.188841] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1230.189051] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1230.189051] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1230.189183] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1230.189325] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1230.189407] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1230.189526] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1230.189680] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1230.770756] env[61962]: DEBUG nova.network.neutron [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Successfully created port: 22d6ef13-5c72-4799-9359-286fa0223faf {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1231.158465] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.158465] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.593800] env[61962]: DEBUG nova.network.neutron [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Successfully updated port: 22d6ef13-5c72-4799-9359-286fa0223faf {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1231.609233] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquiring lock "refresh_cache-35bf25a7-a3c2-40f1-b415-42d6167b0a1c" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1231.609306] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquired lock "refresh_cache-35bf25a7-a3c2-40f1-b415-42d6167b0a1c" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.609581] env[61962]: DEBUG nova.network.neutron [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1231.612613] env[61962]: DEBUG nova.compute.manager [req-a03adc66-8795-4768-b7b0-e38813e7e155 req-ceffcb99-a95c-4386-b171-e622500e10b6 service nova] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Received event network-vif-plugged-22d6ef13-5c72-4799-9359-286fa0223faf {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1231.612870] env[61962]: DEBUG oslo_concurrency.lockutils [req-a03adc66-8795-4768-b7b0-e38813e7e155 req-ceffcb99-a95c-4386-b171-e622500e10b6 service nova] Acquiring lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.613096] env[61962]: DEBUG oslo_concurrency.lockutils [req-a03adc66-8795-4768-b7b0-e38813e7e155 req-ceffcb99-a95c-4386-b171-e622500e10b6 service nova] Lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c-events" acquired 
by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.613272] env[61962]: DEBUG oslo_concurrency.lockutils [req-a03adc66-8795-4768-b7b0-e38813e7e155 req-ceffcb99-a95c-4386-b171-e622500e10b6 service nova] Lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.613703] env[61962]: DEBUG nova.compute.manager [req-a03adc66-8795-4768-b7b0-e38813e7e155 req-ceffcb99-a95c-4386-b171-e622500e10b6 service nova] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] No waiting events found dispatching network-vif-plugged-22d6ef13-5c72-4799-9359-286fa0223faf {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1231.613905] env[61962]: WARNING nova.compute.manager [req-a03adc66-8795-4768-b7b0-e38813e7e155 req-ceffcb99-a95c-4386-b171-e622500e10b6 service nova] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Received unexpected event network-vif-plugged-22d6ef13-5c72-4799-9359-286fa0223faf for instance with vm_state building and task_state spawning. [ 1231.667985] env[61962]: DEBUG nova.network.neutron [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1231.916293] env[61962]: DEBUG nova.network.neutron [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Updating instance_info_cache with network_info: [{"id": "22d6ef13-5c72-4799-9359-286fa0223faf", "address": "fa:16:3e:2e:37:07", "network": {"id": "0ca60da7-4dac-48f7-bcfa-343a9943c56a", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1439656273-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c2add945d24a299e5bb0fe543deec3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "309d7cfa-b4da-4eec-9f4b-2e10d215fac7", "external-id": "nsx-vlan-transportzone-285", "segmentation_id": 285, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22d6ef13-5c", "ovs_interfaceid": "22d6ef13-5c72-4799-9359-286fa0223faf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.934210] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Releasing lock "refresh_cache-35bf25a7-a3c2-40f1-b415-42d6167b0a1c" 
{{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1231.934510] env[61962]: DEBUG nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Instance network_info: |[{"id": "22d6ef13-5c72-4799-9359-286fa0223faf", "address": "fa:16:3e:2e:37:07", "network": {"id": "0ca60da7-4dac-48f7-bcfa-343a9943c56a", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1439656273-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c2add945d24a299e5bb0fe543deec3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "309d7cfa-b4da-4eec-9f4b-2e10d215fac7", "external-id": "nsx-vlan-transportzone-285", "segmentation_id": 285, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22d6ef13-5c", "ovs_interfaceid": "22d6ef13-5c72-4799-9359-286fa0223faf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1231.934927] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:37:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '309d7cfa-b4da-4eec-9f4b-2e10d215fac7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22d6ef13-5c72-4799-9359-286fa0223faf', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1231.942659] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Creating folder: Project (a9c2add945d24a299e5bb0fe543deec3). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1231.943262] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8af1466b-cedb-4cb0-8a2b-6e7a562cf15e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.954518] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Created folder: Project (a9c2add945d24a299e5bb0fe543deec3) in parent group-v953327. [ 1231.954719] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Creating folder: Instances. Parent ref: group-v953397. 
{{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1231.954960] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f6290c6-a234-440a-975b-e317f0d79337 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.965131] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Created folder: Instances in parent group-v953397. [ 1231.965378] env[61962]: DEBUG oslo.service.loopingcall [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1231.965567] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1231.965770] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8481c357-c804-4140-81a5-c9c0cd36fb92 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.985356] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1231.985356] env[61962]: value = "task-4892019" [ 1231.985356] env[61962]: _type = "Task" [ 1231.985356] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.993689] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892019, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.495495] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892019, 'name': CreateVM_Task, 'duration_secs': 0.481512} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.495867] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1232.507528] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1232.507706] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.508054] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1232.509215] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26abec01-5b0b-40cf-9a24-591625c564e6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.513880] env[61962]: DEBUG oslo_vmware.api [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Waiting for the task: (returnval){ [ 1232.513880] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]523796ab-6fcf-0bfd-cdd9-47e06f20d8e7" [ 1232.513880] env[61962]: _type = "Task" [ 1232.513880] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.522845] env[61962]: DEBUG oslo_vmware.api [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]523796ab-6fcf-0bfd-cdd9-47e06f20d8e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.024965] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.025283] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1233.025531] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.158513] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.640068] env[61962]: DEBUG nova.compute.manager [req-b8c94d5f-4b19-4179-8bc7-09173a8fd9bd req-fc2e72e3-dc31-45ef-bb1a-9967fd326527 service nova] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Received event network-changed-22d6ef13-5c72-4799-9359-286fa0223faf {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1233.640068] env[61962]: DEBUG nova.compute.manager [req-b8c94d5f-4b19-4179-8bc7-09173a8fd9bd req-fc2e72e3-dc31-45ef-bb1a-9967fd326527 service nova] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Refreshing instance network info cache due to event network-changed-22d6ef13-5c72-4799-9359-286fa0223faf. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1233.640651] env[61962]: DEBUG oslo_concurrency.lockutils [req-b8c94d5f-4b19-4179-8bc7-09173a8fd9bd req-fc2e72e3-dc31-45ef-bb1a-9967fd326527 service nova] Acquiring lock "refresh_cache-35bf25a7-a3c2-40f1-b415-42d6167b0a1c" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.641042] env[61962]: DEBUG oslo_concurrency.lockutils [req-b8c94d5f-4b19-4179-8bc7-09173a8fd9bd req-fc2e72e3-dc31-45ef-bb1a-9967fd326527 service nova] Acquired lock "refresh_cache-35bf25a7-a3c2-40f1-b415-42d6167b0a1c" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.641369] env[61962]: DEBUG nova.network.neutron [req-b8c94d5f-4b19-4179-8bc7-09173a8fd9bd req-fc2e72e3-dc31-45ef-bb1a-9967fd326527 service nova] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Refreshing network info cache for port 22d6ef13-5c72-4799-9359-286fa0223faf {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1234.158226] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.219841] env[61962]: DEBUG nova.network.neutron [req-b8c94d5f-4b19-4179-8bc7-09173a8fd9bd req-fc2e72e3-dc31-45ef-bb1a-9967fd326527 service nova] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Updated VIF entry in instance network info cache for port 22d6ef13-5c72-4799-9359-286fa0223faf. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1234.220257] env[61962]: DEBUG nova.network.neutron [req-b8c94d5f-4b19-4179-8bc7-09173a8fd9bd req-fc2e72e3-dc31-45ef-bb1a-9967fd326527 service nova] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Updating instance_info_cache with network_info: [{"id": "22d6ef13-5c72-4799-9359-286fa0223faf", "address": "fa:16:3e:2e:37:07", "network": {"id": "0ca60da7-4dac-48f7-bcfa-343a9943c56a", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1439656273-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c2add945d24a299e5bb0fe543deec3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "309d7cfa-b4da-4eec-9f4b-2e10d215fac7", "external-id": "nsx-vlan-transportzone-285", "segmentation_id": 285, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22d6ef13-5c", "ovs_interfaceid": "22d6ef13-5c72-4799-9359-286fa0223faf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.234298] env[61962]: DEBUG oslo_concurrency.lockutils [req-b8c94d5f-4b19-4179-8bc7-09173a8fd9bd req-fc2e72e3-dc31-45ef-bb1a-9967fd326527 service nova] Releasing lock "refresh_cache-35bf25a7-a3c2-40f1-b415-42d6167b0a1c" {{(pid=61962) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1238.516981] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Acquiring lock "3d2749f8-18d2-4631-adc2-093988125eb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.516981] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Lock "3d2749f8-18d2-4631-adc2-093988125eb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.573742] env[61962]: DEBUG oslo_concurrency.lockutils [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquiring lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.716396] env[61962]: WARNING oslo_vmware.rw_handles [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1276.716396] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1276.716396] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1276.716396] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1276.716396] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1276.716396] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1276.716396] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1276.716396] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1276.716396] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1276.716396] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1276.716396] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1276.716396] env[61962]: ERROR oslo_vmware.rw_handles [ 1276.717827] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/81bd7d81-b59d-4f28-9152-5869debf50a2/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1276.718772] env[61962]: DEBUG 
nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1276.719221] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Copying Virtual Disk [datastore2] vmware_temp/81bd7d81-b59d-4f28-9152-5869debf50a2/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/81bd7d81-b59d-4f28-9152-5869debf50a2/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1276.719563] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-465b5a94-940c-46f7-91b6-e4d6cfba7082 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.729146] env[61962]: DEBUG oslo_vmware.api [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Waiting for the task: (returnval){ [ 1276.729146] env[61962]: value = "task-4892020" [ 1276.729146] env[61962]: _type = "Task" [ 1276.729146] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.738621] env[61962]: DEBUG oslo_vmware.api [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Task: {'id': task-4892020, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.240844] env[61962]: DEBUG oslo_vmware.exceptions [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1277.241223] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1277.241780] env[61962]: ERROR nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1277.241780] env[61962]: Faults: ['InvalidArgument'] [ 1277.241780] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Traceback (most recent call last): [ 1277.241780] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1277.241780] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] yield resources [ 1277.241780] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1277.241780] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] self.driver.spawn(context, instance, image_meta, [ 1277.241780] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1277.241780] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1277.241780] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1277.241780] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] self._fetch_image_if_missing(context, vi) [ 1277.241780] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] image_cache(vi, tmp_image_ds_loc) [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] vm_util.copy_virtual_disk( [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] session._wait_for_task(vmdk_copy_task) [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] return self.wait_for_task(task_ref) [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] return evt.wait() [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] result = hub.switch() [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1277.242273] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] return self.greenlet.switch() [ 1277.242721] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1277.242721] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] self.f(*self.args, **self.kw) [ 1277.242721] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1277.242721] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] raise exceptions.translate_fault(task_info.error) [ 1277.242721] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1277.242721] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Faults: ['InvalidArgument'] [ 1277.242721] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] [ 1277.242721] env[61962]: INFO nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Terminating instance [ 1277.243756] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.243978] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1277.244227] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99caf3ac-f7a2-4996-a2dd-7a1fd5f186e0 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.247787] env[61962]: DEBUG nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1277.247989] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1277.248751] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7405ec-4d5d-411f-a959-bfd7a177ba64 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.253306] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1277.253508] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1277.256410] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f95dca4b-f0c3-4d3e-86a8-289e16892530 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.258761] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1277.259012] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a08454f-6b0f-4685-8d10-3f067cff29f9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.263530] env[61962]: DEBUG oslo_vmware.api [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Waiting for the task: (returnval){ [ 1277.263530] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52cf166f-a468-7926-279a-04154cecaef7" [ 1277.263530] env[61962]: _type = "Task" [ 1277.263530] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.272524] env[61962]: DEBUG oslo_vmware.api [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52cf166f-a468-7926-279a-04154cecaef7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.324736] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1277.325023] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1277.325212] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Deleting the datastore file [datastore2] ff7d9da5-bc9a-41f2-a2e4-521d87b22117 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1277.325509] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-837b6299-ea61-4d7d-9520-69c1580cac05 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.333047] env[61962]: DEBUG oslo_vmware.api [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Waiting for the task: (returnval){ [ 1277.333047] env[61962]: value = "task-4892022" [ 1277.333047] env[61962]: _type = "Task" [ 1277.333047] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.341064] env[61962]: DEBUG oslo_vmware.api [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Task: {'id': task-4892022, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.774801] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1277.775074] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Creating directory with path [datastore2] vmware_temp/f122f0b5-825b-4ddb-884b-4c3ab53250dc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1277.775373] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24d023e5-9272-4260-bfd2-b3840ac9eb24 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.788939] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Created directory with path [datastore2] vmware_temp/f122f0b5-825b-4ddb-884b-4c3ab53250dc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1277.789200] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Fetch image to [datastore2] vmware_temp/f122f0b5-825b-4ddb-884b-4c3ab53250dc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1277.789388] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/f122f0b5-825b-4ddb-884b-4c3ab53250dc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1277.790195] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22e7907-249e-4b4c-af65-7046706a0039 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.797884] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3003f4-e0ba-42f9-a706-e9d3fb17c8dc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.808213] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd918fe5-9626-47d4-a816-637300640c67 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.844227] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9158c470-1366-4f6b-82ba-ca34aa37ad11 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.852338] env[61962]: DEBUG oslo_vmware.api [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Task: {'id': task-4892022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094028} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.853765] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1277.853958] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1277.854148] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1277.854322] env[61962]: INFO nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1277.856377] env[61962]: DEBUG nova.compute.claims [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1277.856553] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.856766] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.859643] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-68a10c5f-8b91-44a0-9afa-680de89116ae {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.885249] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1278.033796] env[61962]: DEBUG oslo_vmware.rw_handles [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f122f0b5-825b-4ddb-884b-4c3ab53250dc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1278.092902] env[61962]: DEBUG oslo_vmware.rw_handles [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1278.093107] env[61962]: DEBUG oslo_vmware.rw_handles [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f122f0b5-825b-4ddb-884b-4c3ab53250dc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1278.338283] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3352d3e2-7600-47e8-a1cc-a440f99adba6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.346340] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebe41a0-51e5-45c2-a65f-7fd8f6010954 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.376100] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbce1903-3f39-47ee-b4e0-d2f484537ab6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.383760] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114fce64-ea18-4d6a-8f0b-ee1387f9a807 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.397938] env[61962]: DEBUG nova.compute.provider_tree [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.406374] env[61962]: DEBUG nova.scheduler.client.report [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1278.423133] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.566s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.423680] env[61962]: ERROR nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1278.423680] env[61962]: Faults: ['InvalidArgument'] [ 1278.423680] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Traceback (most recent call last): [ 1278.423680] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1278.423680] env[61962]: ERROR nova.compute.manager 
[instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] self.driver.spawn(context, instance, image_meta, [ 1278.423680] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1278.423680] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1278.423680] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1278.423680] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] self._fetch_image_if_missing(context, vi) [ 1278.423680] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1278.423680] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] image_cache(vi, tmp_image_ds_loc) [ 1278.423680] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] vm_util.copy_virtual_disk( [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] session._wait_for_task(vmdk_copy_task) [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] return self.wait_for_task(task_ref) [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] return evt.wait() [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] result = hub.switch() [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] return self.greenlet.switch() [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1278.424324] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] self.f(*self.args, **self.kw) [ 1278.424900] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1278.424900] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] raise exceptions.translate_fault(task_info.error) [ 1278.424900] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1278.424900] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Faults: ['InvalidArgument'] [ 1278.424900] env[61962]: ERROR nova.compute.manager [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] [ 1278.424900] env[61962]: DEBUG nova.compute.utils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1278.425931] env[61962]: DEBUG nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Build of instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 was re-scheduled: A specified parameter was not correct: fileType [ 1278.425931] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1278.426326] env[61962]: DEBUG nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1278.426495] env[61962]: DEBUG nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1278.426665] env[61962]: DEBUG nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1278.426826] env[61962]: DEBUG nova.network.neutron [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1278.804246] env[61962]: DEBUG nova.network.neutron [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.819021] env[61962]: INFO nova.compute.manager [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Took 0.39 seconds to deallocate network for instance. [ 1278.939021] env[61962]: INFO nova.scheduler.client.report [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Deleted allocations for instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 [ 1278.962816] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7546295f-5c69-4e76-85a0-ed1a4d4adbfc tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 679.617s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.964893] env[61962]: DEBUG oslo_concurrency.lockutils [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 480.421s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.964893] env[61962]: DEBUG oslo_concurrency.lockutils [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquiring lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.965656] env[61962]: DEBUG oslo_concurrency.lockutils [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.965913] env[61962]: DEBUG oslo_concurrency.lockutils [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.968775] env[61962]: INFO nova.compute.manager [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Terminating instance [ 1278.970667] env[61962]: DEBUG oslo_concurrency.lockutils [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquiring lock "refresh_cache-ff7d9da5-bc9a-41f2-a2e4-521d87b22117" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.970867] env[61962]: DEBUG oslo_concurrency.lockutils [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Acquired lock "refresh_cache-ff7d9da5-bc9a-41f2-a2e4-521d87b22117" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.971028] env[61962]: DEBUG nova.network.neutron [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1278.977977] env[61962]: DEBUG nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1279.008826] env[61962]: DEBUG nova.network.neutron [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1279.037718] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.037967] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.039807] env[61962]: INFO nova.compute.claims [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1279.180117] env[61962]: DEBUG nova.network.neutron [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.192139] env[61962]: DEBUG oslo_concurrency.lockutils [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Releasing lock "refresh_cache-ff7d9da5-bc9a-41f2-a2e4-521d87b22117" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1279.192610] env[61962]: DEBUG nova.compute.manager [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1279.192840] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1279.193529] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b711e847-6c6b-4cef-a6be-8ab1b0475544 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.207334] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ede834d-d329-483b-8f88-8699f601a3a0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.241392] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ff7d9da5-bc9a-41f2-a2e4-521d87b22117 could not be found. [ 1279.241603] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1279.241786] env[61962]: INFO nova.compute.manager [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1279.242080] env[61962]: DEBUG oslo.service.loopingcall [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1279.244817] env[61962]: DEBUG nova.compute.manager [-] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1279.244921] env[61962]: DEBUG nova.network.neutron [-] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1279.265568] env[61962]: DEBUG nova.network.neutron [-] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1279.273291] env[61962]: DEBUG nova.network.neutron [-] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.284876] env[61962]: INFO nova.compute.manager [-] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] Took 0.04 seconds to deallocate network for instance. 
[ 1279.384572] env[61962]: DEBUG oslo_concurrency.lockutils [None req-87050117-bd22-4c08-b530-550627895dc9 tempest-ServerRescueTestJSON-985981031 tempest-ServerRescueTestJSON-985981031-project-member] Lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.420s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.388151] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 148.871s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.388346] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: ff7d9da5-bc9a-41f2-a2e4-521d87b22117] During sync_power_state the instance has a pending task (deleting). Skip. [ 1279.388518] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "ff7d9da5-bc9a-41f2-a2e4-521d87b22117" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.445363] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1f356d-9fbc-416b-86c2-9d1978877ad2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.453926] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed34133-64b1-4b87-a62c-9ccc1265c52e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.484898] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f0a8f2-d164-4ade-bd1e-1977f249314c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.493330] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295ed193-f0cb-4615-96bc-f8509666cee3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.507289] env[61962]: DEBUG nova.compute.provider_tree [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1279.517989] env[61962]: DEBUG nova.scheduler.client.report [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1279.534123] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.496s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.534682] env[61962]: DEBUG nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1279.568908] env[61962]: DEBUG nova.compute.utils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1279.570425] env[61962]: DEBUG nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1279.570599] env[61962]: DEBUG nova.network.neutron [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1279.579679] env[61962]: DEBUG nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Start building block device mappings for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1279.628086] env[61962]: DEBUG nova.policy [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55cea10ca43048a391324236ce5673a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e816704de44649099e96127498eed7e9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1279.648054] env[61962]: DEBUG nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1279.676618] env[61962]: DEBUG nova.virt.hardware [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1279.676961] env[61962]: DEBUG nova.virt.hardware [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1279.677157] env[61962]: DEBUG nova.virt.hardware [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1279.677389] env[61962]: DEBUG nova.virt.hardware [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1279.677591] env[61962]: DEBUG nova.virt.hardware [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 
tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1279.677776] env[61962]: DEBUG nova.virt.hardware [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1279.678053] env[61962]: DEBUG nova.virt.hardware [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1279.678301] env[61962]: DEBUG nova.virt.hardware [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1279.678527] env[61962]: DEBUG nova.virt.hardware [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1279.678729] env[61962]: DEBUG nova.virt.hardware [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1279.678939] env[61962]: DEBUG nova.virt.hardware [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1279.680031] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fd56f3-efa4-43fe-9e7a-c3fd1585992e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.689188] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d765bcc5-0cad-47e0-a6d4-92a9f13da1b3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.076672] env[61962]: DEBUG nova.network.neutron [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Successfully created port: 7f590f4e-bb84-4d6e-a500-7c726b18a9c0 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1280.860395] env[61962]: DEBUG nova.compute.manager 
[req-bed2301a-6a0d-4755-abd4-415b2571d265 req-9dce7e3c-581b-4746-be07-a987f9ef4f7d service nova] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Received event network-vif-plugged-7f590f4e-bb84-4d6e-a500-7c726b18a9c0 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1280.860395] env[61962]: DEBUG oslo_concurrency.lockutils [req-bed2301a-6a0d-4755-abd4-415b2571d265 req-9dce7e3c-581b-4746-be07-a987f9ef4f7d service nova] Acquiring lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.860395] env[61962]: DEBUG oslo_concurrency.lockutils [req-bed2301a-6a0d-4755-abd4-415b2571d265 req-9dce7e3c-581b-4746-be07-a987f9ef4f7d service nova] Lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.860395] env[61962]: DEBUG oslo_concurrency.lockutils [req-bed2301a-6a0d-4755-abd4-415b2571d265 req-9dce7e3c-581b-4746-be07-a987f9ef4f7d service nova] Lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.860846] env[61962]: DEBUG nova.compute.manager [req-bed2301a-6a0d-4755-abd4-415b2571d265 req-9dce7e3c-581b-4746-be07-a987f9ef4f7d service nova] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] No waiting events found dispatching network-vif-plugged-7f590f4e-bb84-4d6e-a500-7c726b18a9c0 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1280.861310] env[61962]: WARNING nova.compute.manager [req-bed2301a-6a0d-4755-abd4-415b2571d265 req-9dce7e3c-581b-4746-be07-a987f9ef4f7d service nova] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Received unexpected event network-vif-plugged-7f590f4e-bb84-4d6e-a500-7c726b18a9c0 for instance with vm_state building and task_state spawning. 
[ 1280.955484] env[61962]: DEBUG nova.network.neutron [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Successfully updated port: 7f590f4e-bb84-4d6e-a500-7c726b18a9c0 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1280.968395] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquiring lock "refresh_cache-1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.968546] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquired lock "refresh_cache-1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.968695] env[61962]: DEBUG nova.network.neutron [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1281.241756] env[61962]: DEBUG nova.network.neutron [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1281.499235] env[61962]: DEBUG nova.network.neutron [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Updating instance_info_cache with network_info: [{"id": "7f590f4e-bb84-4d6e-a500-7c726b18a9c0", "address": "fa:16:3e:d7:2e:df", "network": {"id": "87194497-676a-49ac-8b3c-b5e3dded6aba", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-757236527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e816704de44649099e96127498eed7e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74c816b2-b8b0-432e-baac-662ed8ea0417", "external-id": "nsx-vlan-transportzone-776", "segmentation_id": 776, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f590f4e-bb", "ovs_interfaceid": "7f590f4e-bb84-4d6e-a500-7c726b18a9c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.515719] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Releasing lock "refresh_cache-1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.515959] env[61962]: DEBUG nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Instance network_info: |[{"id": "7f590f4e-bb84-4d6e-a500-7c726b18a9c0", "address": "fa:16:3e:d7:2e:df", "network": {"id": "87194497-676a-49ac-8b3c-b5e3dded6aba", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-757236527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e816704de44649099e96127498eed7e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74c816b2-b8b0-432e-baac-662ed8ea0417", "external-id": "nsx-vlan-transportzone-776", "segmentation_id": 776, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f590f4e-bb", "ovs_interfaceid": "7f590f4e-bb84-4d6e-a500-7c726b18a9c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1281.516254] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:2e:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74c816b2-b8b0-432e-baac-662ed8ea0417', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f590f4e-bb84-4d6e-a500-7c726b18a9c0', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1281.524502] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Creating folder: Project (e816704de44649099e96127498eed7e9). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1281.525090] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f72bfe31-8769-4367-96b3-3345c64edb5b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.537688] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Created folder: Project (e816704de44649099e96127498eed7e9) in parent group-v953327. [ 1281.537882] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Creating folder: Instances. Parent ref: group-v953400. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1281.538196] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82d8f591-5e5a-4e2a-b052-a38193dd2a97 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.548174] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Created folder: Instances in parent group-v953400. [ 1281.548427] env[61962]: DEBUG oslo.service.loopingcall [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1281.548623] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1281.548858] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea67be73-29b0-4c6b-833a-93fbff343352 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.569570] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1281.569570] env[61962]: value = "task-4892025" [ 1281.569570] env[61962]: _type = "Task" [ 1281.569570] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.578153] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892025, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.080170] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892025, 'name': CreateVM_Task, 'duration_secs': 0.306171} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.080357] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1282.080930] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.081141] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.081478] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1282.081743] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-facfd162-3711-4774-b216-0957abd447db {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.087335] env[61962]: DEBUG oslo_vmware.api [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Waiting for the task: (returnval){ [ 1282.087335] env[61962]: value = 
"session[5210918f-aadc-9b29-42fa-0929c2e42627]529865af-22c7-3e27-af4f-0c57bce29ddd" [ 1282.087335] env[61962]: _type = "Task" [ 1282.087335] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.095772] env[61962]: DEBUG oslo_vmware.api [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]529865af-22c7-3e27-af4f-0c57bce29ddd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.599378] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.599960] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1282.599960] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.891387] env[61962]: DEBUG nova.compute.manager [req-52eaecd9-443e-40b9-a047-31d3e4bf0772 req-0046bb08-8345-48d2-82e5-aa2a12809c55 service nova] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Received event network-changed-7f590f4e-bb84-4d6e-a500-7c726b18a9c0 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1282.891459] env[61962]: DEBUG nova.compute.manager [req-52eaecd9-443e-40b9-a047-31d3e4bf0772 req-0046bb08-8345-48d2-82e5-aa2a12809c55 service nova] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Refreshing instance network info cache due to event network-changed-7f590f4e-bb84-4d6e-a500-7c726b18a9c0. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1282.891726] env[61962]: DEBUG oslo_concurrency.lockutils [req-52eaecd9-443e-40b9-a047-31d3e4bf0772 req-0046bb08-8345-48d2-82e5-aa2a12809c55 service nova] Acquiring lock "refresh_cache-1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.891799] env[61962]: DEBUG oslo_concurrency.lockutils [req-52eaecd9-443e-40b9-a047-31d3e4bf0772 req-0046bb08-8345-48d2-82e5-aa2a12809c55 service nova] Acquired lock "refresh_cache-1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.891970] env[61962]: DEBUG nova.network.neutron [req-52eaecd9-443e-40b9-a047-31d3e4bf0772 req-0046bb08-8345-48d2-82e5-aa2a12809c55 service nova] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Refreshing network info cache for port 7f590f4e-bb84-4d6e-a500-7c726b18a9c0 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1283.158305] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.169693] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.169929] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.170111] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.170289] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1283.171513] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b711224-0474-431e-a683-342c4d78bdfa {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.180807] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbbcb109-ec2f-4933-9cc0-1e4ef6623eae {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.196319] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c8c5fe-8021-49f6-b5c0-faa73861c267 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.203949] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d273fe0-ea41-4c98-8a9b-afbacffa3814 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.238072] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180565MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1283.238072] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.238072] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.315255] env[61962]: DEBUG nova.network.neutron [req-52eaecd9-443e-40b9-a047-31d3e4bf0772 req-0046bb08-8345-48d2-82e5-aa2a12809c55 service nova] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Updated VIF entry in instance network info cache for port 7f590f4e-bb84-4d6e-a500-7c726b18a9c0. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1283.315431] env[61962]: DEBUG nova.network.neutron [req-52eaecd9-443e-40b9-a047-31d3e4bf0772 req-0046bb08-8345-48d2-82e5-aa2a12809c55 service nova] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Updating instance_info_cache with network_info: [{"id": "7f590f4e-bb84-4d6e-a500-7c726b18a9c0", "address": "fa:16:3e:d7:2e:df", "network": {"id": "87194497-676a-49ac-8b3c-b5e3dded6aba", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-757236527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e816704de44649099e96127498eed7e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74c816b2-b8b0-432e-baac-662ed8ea0417", "external-id": "nsx-vlan-transportzone-776", "segmentation_id": 776, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f590f4e-bb", "ovs_interfaceid": "7f590f4e-bb84-4d6e-a500-7c726b18a9c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.325695] env[61962]: DEBUG oslo_concurrency.lockutils [req-52eaecd9-443e-40b9-a047-31d3e4bf0772 req-0046bb08-8345-48d2-82e5-aa2a12809c55 service nova] Releasing lock 
"refresh_cache-1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.327011] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1283.327254] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1dcafe3b-989b-4290-be54-ceccc62a323f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1283.327722] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1283.327722] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1283.327722] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8ecd73a7-7f23-4994-85b9-479584831687 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1283.327902] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1283.328099] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1283.328301] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d970d822-12ab-43cc-8d34-abc6eff8a68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1283.328475] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1283.328632] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1283.340251] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.354525] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 75aed9fe-6947-46b3-9c7d-838bf0f108af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.367534] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0b089824-a1ec-476a-932a-6cba27a09e4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.380051] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0fd2335c-e9e9-41bb-b77a-22b6f2c47946 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.391291] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4d2bbdf5-0581-4a85-b962-36686a464bff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.402947] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 989e57f2-6dac-4734-b895-5724b1c7a06b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.414817] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.426822] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2d9a9155-1dbb-4984-99dd-d40552eef7db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.436769] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a2209177-23ac-4367-9736-2978ffd6fb39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.447007] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5b261fba-bbdf-4ba9-a065-e833f6e44ba2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.456838] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4453ec6d-57f7-4ae1-bafa-4dd993c22d03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.466824] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance be426e8c-aac5-4510-a9e2-20cc79a00cae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.476393] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e62eace3-3320-441e-becb-abff7cdd6660 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.486420] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3d2749f8-18d2-4631-adc2-093988125eb4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.487317] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1283.487317] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '53', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_389fb8d4c2ce47829fe23a6ca60e9bdd': '1', 'io_workload': '10', 'num_proj_7738f0dd32484bf9978c5d006d088c3b': '1', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_3285f5bbd10d495e852d0ef0c0047847': '1', 'num_proj_8e79e89d66b44fa092d586334732c0be': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_99c377cc8b5545a2b6e712291b14cb04': '1', 'num_proj_696cc1bc78cb4511a76f788d82895fef': '1', 'num_proj_a9c2add945d24a299e5bb0fe543deec3': '1', 'num_task_spawning': '1', 'num_proj_e816704de44649099e96127498eed7e9': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1283.784469] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90204fa-6873-4e5b-9ce1-61ab6ef0edc2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.792905] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cdad44-98fc-4396-ba18-9a2b6984a01e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.825343] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdeefb36-074f-49aa-a8b7-cc596417e518 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.833458] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2bf1cfa-d157-4a0f-af2c-7452b3256958 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1283.848169] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1283.858440] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1283.881077] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1283.881331] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.643s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.882381] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1284.882654] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1285.702641] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "1035139f-2193-4d1c-a220-369ef6096ae0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.702925] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "1035139f-2193-4d1c-a220-369ef6096ae0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.154045] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.157912] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.158207] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1290.158237] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1290.181133] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1290.181319] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1290.181454] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1290.181586] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1290.181712] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1290.181830] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1290.181950] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1290.182108] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1290.182307] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1290.182458] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1290.182588] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1290.183096] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.158529] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.158529] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1295.159330] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.159085] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.033402] env[61962]: DEBUG oslo_concurrency.lockutils [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquiring lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.637564] env[61962]: WARNING oslo_vmware.rw_handles [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1326.637564] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1326.637564] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1326.637564] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1326.637564] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1326.637564] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1326.637564] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1326.637564] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1326.637564] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1326.637564] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1326.637564] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1326.637564] env[61962]: ERROR 
oslo_vmware.rw_handles [ 1326.638148] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/f122f0b5-825b-4ddb-884b-4c3ab53250dc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1326.640052] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1326.640308] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Copying Virtual Disk [datastore2] vmware_temp/f122f0b5-825b-4ddb-884b-4c3ab53250dc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/f122f0b5-825b-4ddb-884b-4c3ab53250dc/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1326.640775] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5db81675-042a-499c-846d-ef7d416c75de {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.649564] env[61962]: DEBUG oslo_vmware.api [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Waiting for the task: (returnval){ [ 1326.649564] env[61962]: value = "task-4892026" [ 1326.649564] env[61962]: _type = "Task" [ 1326.649564] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.657881] env[61962]: DEBUG oslo_vmware.api [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Task: {'id': task-4892026, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.160815] env[61962]: DEBUG oslo_vmware.exceptions [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1327.161184] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.161805] env[61962]: ERROR nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1327.161805] env[61962]: Faults: ['InvalidArgument'] [ 1327.161805] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Traceback (most recent call last): [ 1327.161805] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1327.161805] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] yield resources [ 1327.161805] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1327.161805] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] self.driver.spawn(context, instance, image_meta, [ 1327.161805] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1327.161805] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1327.161805] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1327.161805] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] self._fetch_image_if_missing(context, vi) [ 1327.161805] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] image_cache(vi, tmp_image_ds_loc) [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] vm_util.copy_virtual_disk( [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] session._wait_for_task(vmdk_copy_task) [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] return self.wait_for_task(task_ref) [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] return evt.wait() [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] result = hub.switch() [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1327.162172] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] return self.greenlet.switch() [ 1327.162461] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1327.162461] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] self.f(*self.args, **self.kw) [ 1327.162461] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1327.162461] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] raise exceptions.translate_fault(task_info.error) [ 1327.162461] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1327.162461] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Faults: ['InvalidArgument'] [ 1327.162461] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] [ 1327.162461] env[61962]: INFO nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Terminating instance [ 1327.163784] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.163974] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1327.164232] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9a0b0db-50f1-478f-be72-71db885dd0e7 {{(pid=61962) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.166489] env[61962]: DEBUG nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1327.166685] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1327.167455] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714fa667-655a-4481-bae3-5f22a1f151bb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.174656] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1327.174883] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac6de6e1-6f8c-4699-bad3-2b7c9a117d3b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.177174] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1327.177347] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1327.178352] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3fc50f4-a310-416a-9172-4929aa4a5847 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.183731] env[61962]: DEBUG oslo_vmware.api [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Waiting for the task: (returnval){ [ 1327.183731] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52793a1a-77e9-3f30-c135-8e8dfaf1ce13" [ 1327.183731] env[61962]: _type = "Task" [ 1327.183731] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.193036] env[61962]: DEBUG oslo_vmware.api [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52793a1a-77e9-3f30-c135-8e8dfaf1ce13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.253907] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1327.254159] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1327.254446] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Deleting the datastore file [datastore2] bf82c44c-95ce-4e4c-a32f-444bba1ef5cd {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1327.254786] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da4a9eaf-d9c2-40a7-b3da-f1da6bcd2832 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.261997] env[61962]: DEBUG oslo_vmware.api [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Waiting for the task: (returnval){ [ 1327.261997] env[61962]: value = "task-4892028" [ 1327.261997] env[61962]: _type = "Task" [ 1327.261997] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.270293] env[61962]: DEBUG oslo_vmware.api [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Task: {'id': task-4892028, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.696857] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1327.697154] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Creating directory with path [datastore2] vmware_temp/e9f74ca4-fe59-4fb9-9821-06331ed1a6c6/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1327.697391] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82564725-73e0-4099-b637-8d9742970d99 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.712386] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Created directory with path [datastore2] vmware_temp/e9f74ca4-fe59-4fb9-9821-06331ed1a6c6/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1327.712647] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Fetch image to [datastore2] vmware_temp/e9f74ca4-fe59-4fb9-9821-06331ed1a6c6/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1327.712825] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/e9f74ca4-fe59-4fb9-9821-06331ed1a6c6/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1327.713693] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9d0b3b-3c3f-4ec6-ad29-cc0c0abea8ea {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.721265] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55cd9eb-4bf9-43f2-85a4-0c55532dd775 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.732166] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b082c705-21b1-4a58-b42e-5010cc2772f4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.768064] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519f5c05-68b7-4c10-9738-7e07f0ba8bf3 {{(pid=61962) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.776184] env[61962]: DEBUG oslo_vmware.api [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Task: {'id': task-4892028, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071565} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.777744] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1327.777943] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1327.778131] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1327.778305] env[61962]: INFO nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Took 0.61 seconds to destroy the instance on the hypervisor. 
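The "Waiting for the task: (returnval){ value = "task-4892028" ... } to complete." / "progress is 0%." / "completed successfully" sequence above is oslo.vmware's task-polling protocol around FileManager.DeleteDatastoreFile_Task. A minimal sketch of that call pattern follows; the vCenter endpoint, credentials, poll settings, and dc_ref are placeholders, not values from this log, and only VMwareAPISession, invoke_api, and wait_for_task are real oslo.vmware API.

```python
# Minimal sketch of the oslo.vmware task-polling pattern seen above.
# Endpoint, credentials, and dc_ref are placeholders, not from this log.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',   # placeholder vCenter endpoint/credentials
    api_retry_count=10,
    task_poll_interval=0.5)                # cadence of the "progress is 0%." polls

dc_ref = None  # placeholder; Nova's ds_util passes the real Datacenter moref here

file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] bf82c44c-95ce-4e4c-a32f-444bba1ef5cd',
    datacenter=dc_ref)

# wait_for_task() polls TaskInfo until the task succeeds (emitting the
# _poll_task DEBUG lines) or raises a translated VimFaultException,
# as happens later in this log with the fileType fault.
session.wait_for_task(task)
```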
[ 1327.780248] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2acad7c2-80b0-402a-8437-702127f2f0b4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.782736] env[61962]: DEBUG nova.compute.claims [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1327.782736] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.782824] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.810812] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1327.963727] env[61962]: DEBUG oslo_vmware.rw_handles [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9f74ca4-fe59-4fb9-9821-06331ed1a6c6/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1328.024441] env[61962]: DEBUG oslo_vmware.rw_handles [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1328.024626] env[61962]: DEBUG oslo_vmware.rw_handles [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9f74ca4-fe59-4fb9-9821-06331ed1a6c6/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1328.225746] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb66429-e506-4195-92ab-37e72cfb6819 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.234138] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83aeaf4-a07e-442f-804c-e7cf6aaf16a1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.266246] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b41bf5-069a-409e-9bb6-9a561b21925c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.274689] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d16068-dcc3-4b94-8fcc-6edc0dcf0622 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.288797] env[61962]: DEBUG nova.compute.provider_tree [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1328.297979] env[61962]: DEBUG nova.scheduler.client.report [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1328.315430] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.532s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.315971] env[61962]: ERROR nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1328.315971] env[61962]: Faults: ['InvalidArgument'] [ 1328.315971] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Traceback (most recent call last): [ 1328.315971] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1328.315971] 
env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] self.driver.spawn(context, instance, image_meta, [ 1328.315971] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1328.315971] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1328.315971] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1328.315971] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] self._fetch_image_if_missing(context, vi) [ 1328.315971] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1328.315971] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] image_cache(vi, tmp_image_ds_loc) [ 1328.315971] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] vm_util.copy_virtual_disk( [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] session._wait_for_task(vmdk_copy_task) [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] return self.wait_for_task(task_ref) [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] return evt.wait() [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] result = hub.switch() [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] return self.greenlet.switch() [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1328.316322] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] self.f(*self.args, **self.kw) [ 1328.316624] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1328.316624] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] raise exceptions.translate_fault(task_info.error) [ 1328.316624] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1328.316624] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Faults: ['InvalidArgument'] [ 1328.316624] env[61962]: ERROR nova.compute.manager [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] [ 1328.316735] env[61962]: DEBUG nova.compute.utils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1328.318529] env[61962]: DEBUG nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Build of instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd was re-scheduled: A specified parameter was not correct: fileType [ 1328.318529] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1328.318916] env[61962]: DEBUG nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1328.319199] env[61962]: DEBUG nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1328.319489] env[61962]: DEBUG nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1328.319679] env[61962]: DEBUG nova.network.neutron [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1328.647929] env[61962]: DEBUG nova.network.neutron [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.662584] env[61962]: INFO nova.compute.manager [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Took 0.34 seconds to deallocate network for instance. [ 1328.776448] env[61962]: INFO nova.scheduler.client.report [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Deleted allocations for instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd [ 1328.800935] env[61962]: DEBUG oslo_concurrency.lockutils [None req-39280a80-f6a9-4818-9c77-d5be7d130e72 tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 671.548s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.802313] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 475.475s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.802558] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Acquiring lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.803072] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.803256] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.807039] env[61962]: INFO nova.compute.manager [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Terminating instance [ 1328.808672] env[61962]: DEBUG nova.compute.manager [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1328.808865] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1328.809141] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7fdb709e-f0b1-4659-a2c1-b9eb3dec6673 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.816145] env[61962]: DEBUG nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1328.822864] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4a1ae5-3aa4-4fd1-b71c-1490ddbfb6bc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.859780] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bf82c44c-95ce-4e4c-a32f-444bba1ef5cd could not be found. 
[ 1328.860038] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1328.860217] env[61962]: INFO nova.compute.manager [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1328.860460] env[61962]: DEBUG oslo.service.loopingcall [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1328.863042] env[61962]: DEBUG nova.compute.manager [-] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1328.863157] env[61962]: DEBUG nova.network.neutron [-] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1328.883372] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.883556] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.885400] env[61962]: INFO nova.compute.claims [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1328.907197] env[61962]: DEBUG nova.network.neutron [-] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.921880] env[61962]: INFO nova.compute.manager [-] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] Took 0.06 seconds to deallocate network for instance. 
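The "Waiting for function ... _deallocate_network_with_retries to return." line above comes from oslo.service's looping-call machinery. A sketch of that idiom; the function body and interval are illustrative, not Nova's actual network-deallocation retry logic:

```python
# Sketch of the oslo.service looping-call idiom; the body and interval
# are illustrative, not Nova's actual retry policy.
from oslo_service import loopingcall

def _deallocate_network_with_retries():
    # Real code would retry the neutron deallocation; raising
    # LoopingCallDone stops the loop and hands retvalue back to wait().
    raise loopingcall.LoopingCallDone(retvalue=None)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
timer.start(interval=0.5).wait()   # blocks until LoopingCallDone is raised
```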
[ 1329.028275] env[61962]: DEBUG oslo_concurrency.lockutils [None req-4d8c264b-31b7-4f00-920d-85037918abcd tempest-ServersTestFqdnHostnames-1235394237 tempest-ServersTestFqdnHostnames-1235394237-project-member] Lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.226s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.029633] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 198.513s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.029903] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: bf82c44c-95ce-4e4c-a32f-444bba1ef5cd] During sync_power_state the instance has a pending task (deleting). Skip. [ 1329.030115] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "bf82c44c-95ce-4e4c-a32f-444bba1ef5cd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.285645] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9c27f2-c8ef-4873-9aef-07c77c567ca2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.292348] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025462d2-ebf2-4d96-bf3c-662dca0b291f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.323360] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d223f8-9cfe-4915-8cbf-f348eabcc3a0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.331246] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d8ef24-6a02-4828-9e8d-4217727ff25b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.345765] env[61962]: DEBUG nova.compute.provider_tree [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1329.354871] env[61962]: DEBUG nova.scheduler.client.report [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 
'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1329.369164] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.485s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.369604] env[61962]: DEBUG nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1329.405866] env[61962]: DEBUG nova.compute.utils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1329.407459] env[61962]: DEBUG nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1329.407459] env[61962]: DEBUG nova.network.neutron [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1329.418217] env[61962]: DEBUG nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1329.472324] env[61962]: DEBUG nova.policy [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49692c7648b34a40bead274c69e2dd88', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eaa1993aea5f4f039f35b32e2321e30f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1329.482871] env[61962]: DEBUG nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1329.508733] env[61962]: DEBUG nova.virt.hardware [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1329.508988] env[61962]: DEBUG nova.virt.hardware [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1329.509167] env[61962]: DEBUG nova.virt.hardware [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1329.509385] env[61962]: DEBUG nova.virt.hardware [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1329.509530] env[61962]: DEBUG nova.virt.hardware [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1329.509698] env[61962]: DEBUG nova.virt.hardware [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1329.509913] env[61962]: DEBUG nova.virt.hardware [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1329.510087] env[61962]: DEBUG nova.virt.hardware [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1329.510257] env[61962]: DEBUG 
nova.virt.hardware [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1329.510422] env[61962]: DEBUG nova.virt.hardware [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1329.510596] env[61962]: DEBUG nova.virt.hardware [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1329.511481] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebd9782-1ee3-4207-b80d-00a809ba56e8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.520049] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a5a38e-3aab-457c-978d-edde4b75c614 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.046470] env[61962]: DEBUG nova.network.neutron [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Successfully created port: 44453a1c-b94e-48b8-a8f0-4d8d447a8444 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1330.741845] env[61962]: DEBUG nova.compute.manager [req-df3b5f11-d06e-48fb-b497-e8e1192da335 req-fdaac4f9-fff9-4be4-a577-d8949f968af8 service nova] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Received event network-vif-plugged-44453a1c-b94e-48b8-a8f0-4d8d447a8444 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1330.742112] env[61962]: DEBUG oslo_concurrency.lockutils [req-df3b5f11-d06e-48fb-b497-e8e1192da335 req-fdaac4f9-fff9-4be4-a577-d8949f968af8 service nova] Acquiring lock "c3c36949-754e-4bab-860d-74e0cde4dfd6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.742311] env[61962]: DEBUG oslo_concurrency.lockutils [req-df3b5f11-d06e-48fb-b497-e8e1192da335 req-fdaac4f9-fff9-4be4-a577-d8949f968af8 service nova] Lock "c3c36949-754e-4bab-860d-74e0cde4dfd6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.742493] env[61962]: DEBUG oslo_concurrency.lockutils [req-df3b5f11-d06e-48fb-b497-e8e1192da335 req-fdaac4f9-fff9-4be4-a577-d8949f968af8 service nova] Lock "c3c36949-754e-4bab-860d-74e0cde4dfd6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.742659] env[61962]: DEBUG 
nova.compute.manager [req-df3b5f11-d06e-48fb-b497-e8e1192da335 req-fdaac4f9-fff9-4be4-a577-d8949f968af8 service nova] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] No waiting events found dispatching network-vif-plugged-44453a1c-b94e-48b8-a8f0-4d8d447a8444 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1330.742825] env[61962]: WARNING nova.compute.manager [req-df3b5f11-d06e-48fb-b497-e8e1192da335 req-fdaac4f9-fff9-4be4-a577-d8949f968af8 service nova] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Received unexpected event network-vif-plugged-44453a1c-b94e-48b8-a8f0-4d8d447a8444 for instance with vm_state building and task_state spawning. [ 1330.839513] env[61962]: DEBUG nova.network.neutron [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Successfully updated port: 44453a1c-b94e-48b8-a8f0-4d8d447a8444 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1330.851758] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquiring lock "refresh_cache-c3c36949-754e-4bab-860d-74e0cde4dfd6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.851967] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquired lock "refresh_cache-c3c36949-754e-4bab-860d-74e0cde4dfd6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.852212] env[61962]: DEBUG nova.network.neutron [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1330.905388] env[61962]: DEBUG nova.network.neutron [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1331.175689] env[61962]: DEBUG nova.network.neutron [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Updating instance_info_cache with network_info: [{"id": "44453a1c-b94e-48b8-a8f0-4d8d447a8444", "address": "fa:16:3e:79:92:41", "network": {"id": "03b29cb7-6ae4-49b2-8a98-82a63e2ce563", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1452346072-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaa1993aea5f4f039f35b32e2321e30f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44453a1c-b9", "ovs_interfaceid": "44453a1c-b94e-48b8-a8f0-4d8d447a8444", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.193949] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Releasing lock "refresh_cache-c3c36949-754e-4bab-860d-74e0cde4dfd6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.193949] env[61962]: DEBUG nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Instance network_info: |[{"id": "44453a1c-b94e-48b8-a8f0-4d8d447a8444", "address": "fa:16:3e:79:92:41", "network": {"id": "03b29cb7-6ae4-49b2-8a98-82a63e2ce563", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1452346072-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaa1993aea5f4f039f35b32e2321e30f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44453a1c-b9", "ovs_interfaceid": "44453a1c-b94e-48b8-a8f0-4d8d447a8444", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1331.194703] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:92:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20e3f794-c7a3-4696-9488-ecf34c570ef9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44453a1c-b94e-48b8-a8f0-4d8d447a8444', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1331.201875] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Creating folder: Project (eaa1993aea5f4f039f35b32e2321e30f). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1331.203525] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-934dd870-d866-404d-97c2-844e423089a9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.214284] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Created folder: Project (eaa1993aea5f4f039f35b32e2321e30f) in parent group-v953327. [ 1331.214572] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Creating folder: Instances. Parent ref: group-v953403. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1331.215119] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7c734f1-28ee-4869-8fad-a011289005ba {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.225963] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Created folder: Instances in parent group-v953403. [ 1331.226239] env[61962]: DEBUG oslo.service.loopingcall [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1331.226446] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1331.226708] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ba4af1a-5b23-411d-b894-60a0f306f428 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.247368] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1331.247368] env[61962]: value = "task-4892031" [ 1331.247368] env[61962]: _type = "Task" [ 1331.247368] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.256051] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892031, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.757682] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892031, 'name': CreateVM_Task, 'duration_secs': 0.352512} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.757872] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1331.758579] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.758746] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.759090] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1331.759361] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b93c8c82-5b59-47d1-98da-d54ed10d1d96 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.764786] env[61962]: DEBUG oslo_vmware.api [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Waiting for the task: (returnval){ [ 1331.764786] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52bbeea0-f5d8-2045-5695-cf6a9a6873b5" [ 1331.764786] env[61962]: _type = "Task" [ 1331.764786] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.774189] env[61962]: DEBUG oslo_vmware.api [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52bbeea0-f5d8-2045-5695-cf6a9a6873b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.276929] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1332.277279] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1332.277486] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1332.780404] env[61962]: DEBUG nova.compute.manager [req-551ac333-9e23-4190-bc40-eedf79f233cb req-0c8dede4-25a3-46d3-a05a-e47415a4fae2 service nova] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Received event network-changed-44453a1c-b94e-48b8-a8f0-4d8d447a8444 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1332.780610] env[61962]: DEBUG nova.compute.manager [req-551ac333-9e23-4190-bc40-eedf79f233cb req-0c8dede4-25a3-46d3-a05a-e47415a4fae2 service nova] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Refreshing instance network info cache due to event network-changed-44453a1c-b94e-48b8-a8f0-4d8d447a8444. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1332.780815] env[61962]: DEBUG oslo_concurrency.lockutils [req-551ac333-9e23-4190-bc40-eedf79f233cb req-0c8dede4-25a3-46d3-a05a-e47415a4fae2 service nova] Acquiring lock "refresh_cache-c3c36949-754e-4bab-860d-74e0cde4dfd6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1332.780967] env[61962]: DEBUG oslo_concurrency.lockutils [req-551ac333-9e23-4190-bc40-eedf79f233cb req-0c8dede4-25a3-46d3-a05a-e47415a4fae2 service nova] Acquired lock "refresh_cache-c3c36949-754e-4bab-860d-74e0cde4dfd6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1332.781303] env[61962]: DEBUG nova.network.neutron [req-551ac333-9e23-4190-bc40-eedf79f233cb req-0c8dede4-25a3-46d3-a05a-e47415a4fae2 service nova] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Refreshing network info cache for port 44453a1c-b94e-48b8-a8f0-4d8d447a8444 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1333.097723] env[61962]: DEBUG nova.network.neutron [req-551ac333-9e23-4190-bc40-eedf79f233cb req-0c8dede4-25a3-46d3-a05a-e47415a4fae2 service nova] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Updated VIF entry in instance network info cache for port 44453a1c-b94e-48b8-a8f0-4d8d447a8444. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1333.098692] env[61962]: DEBUG nova.network.neutron [req-551ac333-9e23-4190-bc40-eedf79f233cb req-0c8dede4-25a3-46d3-a05a-e47415a4fae2 service nova] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Updating instance_info_cache with network_info: [{"id": "44453a1c-b94e-48b8-a8f0-4d8d447a8444", "address": "fa:16:3e:79:92:41", "network": {"id": "03b29cb7-6ae4-49b2-8a98-82a63e2ce563", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1452346072-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaa1993aea5f4f039f35b32e2321e30f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44453a1c-b9", "ovs_interfaceid": "44453a1c-b94e-48b8-a8f0-4d8d447a8444", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.110193] env[61962]: DEBUG oslo_concurrency.lockutils [req-551ac333-9e23-4190-bc40-eedf79f233cb req-0c8dede4-25a3-46d3-a05a-e47415a4fae2 service nova] Releasing lock "refresh_cache-c3c36949-754e-4bab-860d-74e0cde4dfd6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.158480] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1343.169568] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.169822] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.169992] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.170172] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1343.172914] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043fbde3-e794-4f63-97aa-598cb3f3b055 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.189694] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce41ebc-2454-4d22-9802-4bbeeb462683 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.205410] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5eb6076-027c-4ec9-9382-09e7f5e8e97e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.213395] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2772b29d-2158-4540-8cd7-02a7f7442bfc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.244297] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180566MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1343.244472] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.244693] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.337505] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1dcafe3b-989b-4290-be54-ceccc62a323f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.337604] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.338062] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.338062] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8ecd73a7-7f23-4994-85b9-479584831687 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.338062] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.338195] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.338195] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d970d822-12ab-43cc-8d34-abc6eff8a68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.338314] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.338428] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.338540] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.350482] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 75aed9fe-6947-46b3-9c7d-838bf0f108af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.362639] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0b089824-a1ec-476a-932a-6cba27a09e4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.376511] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 0fd2335c-e9e9-41bb-b77a-22b6f2c47946 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.387988] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4d2bbdf5-0581-4a85-b962-36686a464bff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.399143] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 989e57f2-6dac-4734-b895-5724b1c7a06b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.413703] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.425173] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2d9a9155-1dbb-4984-99dd-d40552eef7db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.438895] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance a2209177-23ac-4367-9736-2978ffd6fb39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.453022] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5b261fba-bbdf-4ba9-a065-e833f6e44ba2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.464833] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4453ec6d-57f7-4ae1-bafa-4dd993c22d03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.475965] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance be426e8c-aac5-4510-a9e2-20cc79a00cae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.488516] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e62eace3-3320-441e-becb-abff7cdd6660 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.501794] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3d2749f8-18d2-4631-adc2-093988125eb4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.513369] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1035139f-2193-4d1c-a220-369ef6096ae0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1343.513531] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1343.514266] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '54', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_7738f0dd32484bf9978c5d006d088c3b': '1', 'io_workload': '10', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_3285f5bbd10d495e852d0ef0c0047847': '1', 'num_proj_8e79e89d66b44fa092d586334732c0be': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_99c377cc8b5545a2b6e712291b14cb04': '1', 'num_proj_696cc1bc78cb4511a76f788d82895fef': '1', 'num_proj_a9c2add945d24a299e5bb0fe543deec3': '1', 'num_proj_e816704de44649099e96127498eed7e9': '1', 'num_task_spawning': '1', 'num_proj_eaa1993aea5f4f039f35b32e2321e30f': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1343.941545] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15ce724-166e-47c2-ac5c-f7bc1791adbc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.957562] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f71b186-9f8b-47db-bf2c-9ff191393503 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.013943] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfaaaf6-abc4-452c-ab36-e443bd72cc10 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.026087] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6705bc6e-daa9-4f83-b016-e6c3c6f150d0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1344.046261] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.057615] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1344.077991] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1344.078211] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.834s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.079137] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.079137] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1346.154708] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.812168] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquiring lock "53048279-c3ea-484f-9a3e-049b12892f7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.812539] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Lock "53048279-c3ea-484f-9a3e-049b12892f7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.153712] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.157846] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.158156] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1351.158156] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1351.181250] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1351.181411] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1351.182546] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1351.182546] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1351.182546] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1351.182656] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1351.182817] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1351.182943] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1351.183075] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1351.183192] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1351.183346] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1352.158347] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.160036] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.706599] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquiring lock "c3c36949-754e-4bab-860d-74e0cde4dfd6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.158185] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1357.158643] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1357.158956] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1377.005032] env[61962]: WARNING oslo_vmware.rw_handles [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1377.005032] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1377.005032] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1377.005032] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1377.005032] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1377.005032] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1377.005032] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1377.005032] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1377.005032] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1377.005032] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1377.005032] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1377.005032] env[61962]: ERROR oslo_vmware.rw_handles [ 1377.005540] 
env[61962]: DEBUG nova.virt.vmwareapi.images [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/e9f74ca4-fe59-4fb9-9821-06331ed1a6c6/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1377.007632] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1377.007730] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Copying Virtual Disk [datastore2] vmware_temp/e9f74ca4-fe59-4fb9-9821-06331ed1a6c6/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/e9f74ca4-fe59-4fb9-9821-06331ed1a6c6/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1377.008443] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1983ee38-4c1a-4719-a33a-86768e6ba93f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.017788] env[61962]: DEBUG oslo_vmware.api [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Waiting for the task: (returnval){ [ 1377.017788] env[61962]: value = "task-4892032" [ 1377.017788] env[61962]: _type = "Task" [ 1377.017788] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.031416] env[61962]: DEBUG oslo_vmware.api [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Task: {'id': task-4892032, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.529590] env[61962]: DEBUG oslo_vmware.exceptions [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1377.529929] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.531112] env[61962]: ERROR nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1377.531112] env[61962]: Faults: ['InvalidArgument'] [ 1377.531112] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Traceback (most recent call last): [ 1377.531112] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1377.531112] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] yield resources [ 1377.531112] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1377.531112] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] self.driver.spawn(context, instance, image_meta, [ 1377.531112] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1377.531112] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1377.531112] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1377.531112] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] self._fetch_image_if_missing(context, vi) [ 1377.531112] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] image_cache(vi, tmp_image_ds_loc) [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] vm_util.copy_virtual_disk( [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] session._wait_for_task(vmdk_copy_task) [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] return self.wait_for_task(task_ref) [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] return evt.wait() [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] result = hub.switch() [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1377.531450] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] return self.greenlet.switch() [ 1377.531743] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1377.531743] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] self.f(*self.args, **self.kw) [ 1377.531743] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1377.531743] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] raise exceptions.translate_fault(task_info.error) [ 1377.531743] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1377.531743] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Faults: ['InvalidArgument'] [ 1377.531743] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] [ 1377.531743] env[61962]: INFO nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Terminating instance [ 1377.533282] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.533369] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1377.533838] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d24d00ba-f181-4044-9370-e67660359019 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.536617] env[61962]: DEBUG nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1377.536811] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1377.537793] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27cb8d2-4ee0-4107-9cf9-ea136bd61acc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.548034] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1377.549293] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95fc6a8c-f0f5-445c-a792-dcf0e6901bed {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.550960] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1377.551507] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1377.552068] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f37c93e-bc0a-47b9-a955-18db765bcb7b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.558806] env[61962]: DEBUG oslo_vmware.api [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for the task: (returnval){ [ 1377.558806] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]526c109c-6571-ff67-2f3a-d300ccafb6d6" [ 1377.558806] env[61962]: _type = "Task" [ 1377.558806] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.571212] env[61962]: DEBUG oslo_vmware.api [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]526c109c-6571-ff67-2f3a-d300ccafb6d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.627868] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1377.628111] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1377.628350] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Deleting the datastore file [datastore2] 1dcafe3b-989b-4290-be54-ceccc62a323f {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1377.628650] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd667855-4cfe-4bc7-9e8d-2cd1cd56ac64 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.637605] env[61962]: DEBUG oslo_vmware.api [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Waiting for the task: (returnval){ [ 1377.637605] env[61962]: value = "task-4892034" [ 1377.637605] env[61962]: _type = "Task" [ 1377.637605] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.646826] env[61962]: DEBUG oslo_vmware.api [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Task: {'id': task-4892034, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.068832] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1378.069126] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Creating directory with path [datastore2] vmware_temp/3403cac4-06b4-4fcd-9a66-c0c13ef50e54/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1378.069418] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6079893e-47cf-49be-bf49-28857385cd22 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.083254] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Created directory with path [datastore2] vmware_temp/3403cac4-06b4-4fcd-9a66-c0c13ef50e54/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1378.083468] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Fetch image to [datastore2] vmware_temp/3403cac4-06b4-4fcd-9a66-c0c13ef50e54/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1378.083643] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/3403cac4-06b4-4fcd-9a66-c0c13ef50e54/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1378.084527] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e84c66b-ca56-4e66-bac7-c562bf689d25 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.092739] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b51d4b-f026-4210-bd56-efc039354923 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.103181] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e513601-0374-46ea-aac8-7876ace31552 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.136162] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-eb8be77d-4fbf-47fe-8cfa-e36c7d7fe9de {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.145569] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-11b3c429-0dcd-43fd-9a0d-5f2d7f27075e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.148187] env[61962]: DEBUG oslo_vmware.api [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Task: {'id': task-4892034, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111842} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.149180] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1378.149180] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1378.149180] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1378.149273] env[61962]: INFO nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Took 0.61 seconds to destroy the instance on the hypervisor. 
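The DeleteDatastoreFile_Task records above (task-4892034: "Waiting for the task ... to complete", "progress is 0%.", "completed successfully") follow the same poll loop that the earlier CopyVirtualDisk_Task traceback exposes: wait_for_task blocks while a looping call re-reads the task state, logging progress on each pass, and an error state is translated into the VimFaultException raised at [ 1377.531112]. A minimal sketch of that polling pattern, using stand-in TaskInfo/VimFaultException classes and a hypothetical read_task_info callable rather than the real oslo.vmware internals:

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException (assumption)."""

    class TaskInfo:
        """Hypothetical snapshot of a vCenter task: state, progress, error."""
        def __init__(self, state, progress=0, error=None):
            self.state, self.progress, self.error = state, progress, error

    def wait_for_task(read_task_info, interval=0.5):
        """Poll until the task leaves 'running', mirroring the loop in the log."""
        while True:
            info = read_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # the real loop raises exceptions.translate_fault(task_info.error);
                # that is where the traceback's VimFaultException originates
                raise VimFaultException(info.error)
            # still running: report progress and poll again, like the
            # "progress is 0%." records above
            print("progress is %s%%" % info.progress)
            time.sleep(interval)

    # toy run: the task succeeds on the third poll
    snapshots = iter([TaskInfo('running', 0), TaskInfo('running', 60),
                      TaskInfo('success', 100)])
    wait_for_task(lambda: next(snapshots), interval=0)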
[ 1378.153732] env[61962]: DEBUG nova.compute.claims [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1378.154037] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.154212] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.170925] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1378.239859] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3403cac4-06b4-4fcd-9a66-c0c13ef50e54/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1378.296755] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1378.296943] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3403cac4-06b4-4fcd-9a66-c0c13ef50e54/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1378.388476] env[61962]: DEBUG oslo_concurrency.lockutils [None req-120692ab-bf28-47fb-a63c-11dfebf4eb15 tempest-ServerTagsTestJSON-1371681020 tempest-ServerTagsTestJSON-1371681020-project-member] Acquiring lock "4de50eaf-ee14-4aee-b5c0-d08dc5fc8010" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.388707] env[61962]: DEBUG oslo_concurrency.lockutils [None req-120692ab-bf28-47fb-a63c-11dfebf4eb15 tempest-ServerTagsTestJSON-1371681020 tempest-ServerTagsTestJSON-1371681020-project-member] Lock "4de50eaf-ee14-4aee-b5c0-d08dc5fc8010" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.530474] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc3e4eb-3fa5-4b61-9882-7e53a227c738 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.539140] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60113dff-b5c4-4aca-b025-e4c232fc108b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.569754] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b710018-4d41-4bfb-9e92-d94ec19bd517 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.577803] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e60905-a419-40ac-b082-a354a2d22fbd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.593467] env[61962]: DEBUG nova.compute.provider_tree [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1378.602801] env[61962]: DEBUG nova.scheduler.client.report [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1378.621413] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.467s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.621962] env[61962]: ERROR nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1378.621962] env[61962]: Faults: ['InvalidArgument'] [ 1378.621962] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Traceback (most recent call last): [ 1378.621962] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1378.621962] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] self.driver.spawn(context, instance, image_meta, [ 1378.621962] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1378.621962] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1378.621962] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1378.621962] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] self._fetch_image_if_missing(context, vi) [ 1378.621962] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1378.621962] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] image_cache(vi, tmp_image_ds_loc) [ 1378.621962] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] vm_util.copy_virtual_disk( [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] session._wait_for_task(vmdk_copy_task) [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] return self.wait_for_task(task_ref) [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] return evt.wait() [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] result = hub.switch() [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] return self.greenlet.switch() [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1378.622284] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] self.f(*self.args, **self.kw) [ 1378.622635] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1378.622635] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] raise exceptions.translate_fault(task_info.error) [ 1378.622635] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1378.622635] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Faults: ['InvalidArgument'] [ 1378.622635] env[61962]: ERROR nova.compute.manager [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] [ 1378.622752] env[61962]: DEBUG nova.compute.utils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1378.624434] env[61962]: DEBUG nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Build of instance 1dcafe3b-989b-4290-be54-ceccc62a323f was re-scheduled: A specified parameter was not correct: fileType [ 1378.624434] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1378.624819] env[61962]: DEBUG nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1378.624983] env[61962]: DEBUG nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1378.625167] env[61962]: DEBUG nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1378.625350] env[61962]: DEBUG nova.network.neutron [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1379.036125] env[61962]: DEBUG nova.network.neutron [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.055325] env[61962]: INFO nova.compute.manager [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Took 0.43 seconds to deallocate network for instance. [ 1379.173746] env[61962]: INFO nova.scheduler.client.report [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Deleted allocations for instance 1dcafe3b-989b-4290-be54-ceccc62a323f [ 1379.200567] env[61962]: DEBUG oslo_concurrency.lockutils [None req-aeada297-64d9-442f-a2b8-fc450e4cd4d2 tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "1dcafe3b-989b-4290-be54-ceccc62a323f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 671.986s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.201739] env[61962]: DEBUG oslo_concurrency.lockutils [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "1dcafe3b-989b-4290-be54-ceccc62a323f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 474.373s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.204242] env[61962]: DEBUG oslo_concurrency.lockutils [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Acquiring lock "1dcafe3b-989b-4290-be54-ceccc62a323f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.204242] env[61962]: DEBUG oslo_concurrency.lockutils [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "1dcafe3b-989b-4290-be54-ceccc62a323f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.204242] env[61962]: DEBUG oslo_concurrency.lockutils [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "1dcafe3b-989b-4290-be54-ceccc62a323f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.205224] env[61962]: INFO nova.compute.manager [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Terminating instance [ 1379.207040] env[61962]: DEBUG nova.compute.manager [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1379.207265] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1379.208066] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c98a2dd-c8ec-4212-82d2-e2408205942a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.218478] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec206e6-b549-404b-88fe-abe25729d8e5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.230208] env[61962]: DEBUG nova.compute.manager [None req-7244c23f-3af8-45ba-bfca-0a8256fd126d tempest-DeleteServersAdminTestJSON-710750285 tempest-DeleteServersAdminTestJSON-710750285-project-member] [instance: 75aed9fe-6947-46b3-9c7d-838bf0f108af] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1379.254483] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1dcafe3b-989b-4290-be54-ceccc62a323f could not be found. [ 1379.254711] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1379.254894] env[61962]: INFO nova.compute.manager [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1379.255167] env[61962]: DEBUG oslo.service.loopingcall [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1379.255433] env[61962]: DEBUG nova.compute.manager [-] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1379.255538] env[61962]: DEBUG nova.network.neutron [-] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1379.266482] env[61962]: DEBUG nova.compute.manager [None req-7244c23f-3af8-45ba-bfca-0a8256fd126d tempest-DeleteServersAdminTestJSON-710750285 tempest-DeleteServersAdminTestJSON-710750285-project-member] [instance: 75aed9fe-6947-46b3-9c7d-838bf0f108af] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1379.283752] env[61962]: DEBUG nova.network.neutron [-] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.290738] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7244c23f-3af8-45ba-bfca-0a8256fd126d tempest-DeleteServersAdminTestJSON-710750285 tempest-DeleteServersAdminTestJSON-710750285-project-member] Lock "75aed9fe-6947-46b3-9c7d-838bf0f108af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.093s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.293031] env[61962]: INFO nova.compute.manager [-] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] Took 0.04 seconds to deallocate network for instance. [ 1379.306054] env[61962]: DEBUG nova.compute.manager [None req-5bb45e03-0340-4574-a19b-822b205eb001 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0b089824-a1ec-476a-932a-6cba27a09e4e] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1379.333282] env[61962]: DEBUG nova.compute.manager [None req-5bb45e03-0340-4574-a19b-822b205eb001 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0b089824-a1ec-476a-932a-6cba27a09e4e] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1379.361299] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5bb45e03-0340-4574-a19b-822b205eb001 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "0b089824-a1ec-476a-932a-6cba27a09e4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 202.650s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.387805] env[61962]: DEBUG nova.compute.manager [None req-5bb45e03-0340-4574-a19b-822b205eb001 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0fd2335c-e9e9-41bb-b77a-22b6f2c47946] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1379.458528] env[61962]: DEBUG nova.compute.manager [None req-5bb45e03-0340-4574-a19b-822b205eb001 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] [instance: 0fd2335c-e9e9-41bb-b77a-22b6f2c47946] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1379.463743] env[61962]: DEBUG oslo_concurrency.lockutils [None req-380688c5-7486-432d-88a2-312775180ffc tempest-ServersAdminTestJSON-987189382 tempest-ServersAdminTestJSON-987189382-project-member] Lock "1dcafe3b-989b-4290-be54-ceccc62a323f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.261s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.464697] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "1dcafe3b-989b-4290-be54-ceccc62a323f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 248.947s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.464895] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1dcafe3b-989b-4290-be54-ceccc62a323f] During sync_power_state the instance has a pending task (deleting). Skip. [ 1379.465081] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "1dcafe3b-989b-4290-be54-ceccc62a323f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.485565] env[61962]: DEBUG oslo_concurrency.lockutils [None req-5bb45e03-0340-4574-a19b-822b205eb001 tempest-MultipleCreateTestJSON-937113120 tempest-MultipleCreateTestJSON-937113120-project-member] Lock "0fd2335c-e9e9-41bb-b77a-22b6f2c47946" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 202.750s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.494576] env[61962]: DEBUG nova.compute.manager [None req-743cda8f-d191-4576-bac3-fc65868f1692 tempest-ServerShowV247Test-1910617893 tempest-ServerShowV247Test-1910617893-project-member] [instance: 4d2bbdf5-0581-4a85-b962-36686a464bff] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1379.518368] env[61962]: DEBUG nova.compute.manager [None req-743cda8f-d191-4576-bac3-fc65868f1692 tempest-ServerShowV247Test-1910617893 tempest-ServerShowV247Test-1910617893-project-member] [instance: 4d2bbdf5-0581-4a85-b962-36686a464bff] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1379.540998] env[61962]: DEBUG oslo_concurrency.lockutils [None req-743cda8f-d191-4576-bac3-fc65868f1692 tempest-ServerShowV247Test-1910617893 tempest-ServerShowV247Test-1910617893-project-member] Lock "4d2bbdf5-0581-4a85-b962-36686a464bff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 201.964s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.550911] env[61962]: DEBUG nova.compute.manager [None req-b24f0ed0-aeb8-484d-b3ea-a911986d6121 tempest-ServerShowV247Test-1910617893 tempest-ServerShowV247Test-1910617893-project-member] [instance: 989e57f2-6dac-4734-b895-5724b1c7a06b] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1379.575092] env[61962]: DEBUG nova.compute.manager [None req-b24f0ed0-aeb8-484d-b3ea-a911986d6121 tempest-ServerShowV247Test-1910617893 tempest-ServerShowV247Test-1910617893-project-member] [instance: 989e57f2-6dac-4734-b895-5724b1c7a06b] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1379.603606] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b24f0ed0-aeb8-484d-b3ea-a911986d6121 tempest-ServerShowV247Test-1910617893 tempest-ServerShowV247Test-1910617893-project-member] Lock "989e57f2-6dac-4734-b895-5724b1c7a06b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 202.025s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.613364] env[61962]: DEBUG nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1379.675525] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.675774] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.677621] env[61962]: INFO nova.compute.claims [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1380.006493] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d506d16-4e69-4fcd-ac55-6abe268c1968 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.014716] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ce1975-1ed8-4c5f-a969-50e83177ca25 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.045705] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7ac48f-6ab5-4f56-b517-5a754b3b01f6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.053710] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cbc277e-30ef-4422-b4b9-adf14055a3c1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.067326] env[61962]: DEBUG nova.compute.provider_tree [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1380.077833] env[61962]: DEBUG nova.scheduler.client.report [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1380.092284] env[61962]: DEBUG oslo_concurrency.lockutils [None 
req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.416s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.092821] env[61962]: DEBUG nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1380.143149] env[61962]: DEBUG nova.compute.utils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1380.144443] env[61962]: DEBUG nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1380.144628] env[61962]: DEBUG nova.network.neutron [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1380.154019] env[61962]: DEBUG nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1380.217395] env[61962]: DEBUG nova.policy [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5adbe55e80cb4334a788f16c9e1a60c4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a31ea93dd586435ebb82c1dbfd14c2ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1380.227840] env[61962]: DEBUG nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1380.260373] env[61962]: DEBUG nova.virt.hardware [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1380.260680] env[61962]: DEBUG nova.virt.hardware [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1380.260846] env[61962]: DEBUG nova.virt.hardware [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1380.261138] env[61962]: DEBUG nova.virt.hardware [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1380.261241] env[61962]: DEBUG nova.virt.hardware [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1380.261352] env[61962]: DEBUG nova.virt.hardware [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1380.261673] env[61962]: DEBUG nova.virt.hardware [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1380.261760] env[61962]: DEBUG nova.virt.hardware [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1380.261886] env[61962]: DEBUG 
nova.virt.hardware [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1380.262118] env[61962]: DEBUG nova.virt.hardware [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1380.262306] env[61962]: DEBUG nova.virt.hardware [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1380.263218] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32859212-123b-418a-8aef-9237e30bc9bc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.273865] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1ab188-98b9-4fbc-89f4-ac1f6b3bb1b4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.633886] env[61962]: DEBUG nova.network.neutron [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Successfully created port: ae04c44a-85ea-427f-ac84-cfa45f24d102 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1381.232698] env[61962]: DEBUG nova.compute.manager [req-12f7b032-9748-4a8e-a8a1-df3c0eb4e316 req-4e0e8264-fff3-445c-97ff-6d366558d0cd service nova] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Received event network-vif-plugged-ae04c44a-85ea-427f-ac84-cfa45f24d102 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1381.232985] env[61962]: DEBUG oslo_concurrency.lockutils [req-12f7b032-9748-4a8e-a8a1-df3c0eb4e316 req-4e0e8264-fff3-445c-97ff-6d366558d0cd service nova] Acquiring lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.233135] env[61962]: DEBUG oslo_concurrency.lockutils [req-12f7b032-9748-4a8e-a8a1-df3c0eb4e316 req-4e0e8264-fff3-445c-97ff-6d366558d0cd service nova] Lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.233310] env[61962]: DEBUG oslo_concurrency.lockutils [req-12f7b032-9748-4a8e-a8a1-df3c0eb4e316 req-4e0e8264-fff3-445c-97ff-6d366558d0cd service nova] Lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.233477] env[61962]: DEBUG 
nova.compute.manager [req-12f7b032-9748-4a8e-a8a1-df3c0eb4e316 req-4e0e8264-fff3-445c-97ff-6d366558d0cd service nova] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] No waiting events found dispatching network-vif-plugged-ae04c44a-85ea-427f-ac84-cfa45f24d102 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1381.233642] env[61962]: WARNING nova.compute.manager [req-12f7b032-9748-4a8e-a8a1-df3c0eb4e316 req-4e0e8264-fff3-445c-97ff-6d366558d0cd service nova] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Received unexpected event network-vif-plugged-ae04c44a-85ea-427f-ac84-cfa45f24d102 for instance with vm_state building and task_state spawning. [ 1381.322780] env[61962]: DEBUG nova.network.neutron [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Successfully updated port: ae04c44a-85ea-427f-ac84-cfa45f24d102 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1381.339057] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "refresh_cache-6f0c884b-afb3-4d06-9dd2-f07099c364e0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.339233] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquired lock "refresh_cache-6f0c884b-afb3-4d06-9dd2-f07099c364e0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.339425] env[61962]: DEBUG nova.network.neutron [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1381.402743] env[61962]: DEBUG nova.network.neutron [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1381.652365] env[61962]: DEBUG nova.network.neutron [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Updating instance_info_cache with network_info: [{"id": "ae04c44a-85ea-427f-ac84-cfa45f24d102", "address": "fa:16:3e:fa:9f:ba", "network": {"id": "e7c201f8-42a9-48f2-a499-92bcb4866e38", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-63616492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a31ea93dd586435ebb82c1dbfd14c2ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae04c44a-85", "ovs_interfaceid": "ae04c44a-85ea-427f-ac84-cfa45f24d102", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.666873] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Releasing lock "refresh_cache-6f0c884b-afb3-4d06-9dd2-f07099c364e0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.667355] env[61962]: DEBUG nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Instance network_info: |[{"id": "ae04c44a-85ea-427f-ac84-cfa45f24d102", "address": "fa:16:3e:fa:9f:ba", "network": {"id": "e7c201f8-42a9-48f2-a499-92bcb4866e38", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-63616492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a31ea93dd586435ebb82c1dbfd14c2ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae04c44a-85", "ovs_interfaceid": "ae04c44a-85ea-427f-ac84-cfa45f24d102", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1381.668112] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:9f:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41278529-8bd2-44a1-97c8-03967faa3ff7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae04c44a-85ea-427f-ac84-cfa45f24d102', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1381.676187] env[61962]: DEBUG oslo.service.loopingcall [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1381.677050] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1381.677293] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe0c7b3f-1c5f-4742-baa4-4a622773ce45 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.697878] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1381.697878] env[61962]: value = "task-4892035" [ 1381.697878] env[61962]: _type = "Task" [ 1381.697878] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.707844] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892035, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.209367] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892035, 'name': CreateVM_Task, 'duration_secs': 0.314369} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.209551] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1382.211271] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.211478] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.211803] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1382.212106] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6188271-d095-44a3-b5fc-96761d2dbf3f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.217460] env[61962]: DEBUG oslo_vmware.api [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for the task: (returnval){ [ 1382.217460] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52efe85b-592e-0cec-5b0b-17bb34f22993" [ 1382.217460] env[61962]: _type = "Task" [ 1382.217460] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.226609] env[61962]: DEBUG oslo_vmware.api [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52efe85b-592e-0cec-5b0b-17bb34f22993, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.729646] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.729931] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1382.730164] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.385516] env[61962]: DEBUG nova.compute.manager [req-db6e05c5-7337-499f-9d11-7dc92499cba7 req-a9b4e631-47c7-4bed-8814-d9e11638a9ae service nova] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Received event network-changed-ae04c44a-85ea-427f-ac84-cfa45f24d102 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1383.385716] env[61962]: DEBUG nova.compute.manager [req-db6e05c5-7337-499f-9d11-7dc92499cba7 req-a9b4e631-47c7-4bed-8814-d9e11638a9ae service nova] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Refreshing instance network info cache due to event network-changed-ae04c44a-85ea-427f-ac84-cfa45f24d102. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1383.385930] env[61962]: DEBUG oslo_concurrency.lockutils [req-db6e05c5-7337-499f-9d11-7dc92499cba7 req-a9b4e631-47c7-4bed-8814-d9e11638a9ae service nova] Acquiring lock "refresh_cache-6f0c884b-afb3-4d06-9dd2-f07099c364e0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.386087] env[61962]: DEBUG oslo_concurrency.lockutils [req-db6e05c5-7337-499f-9d11-7dc92499cba7 req-a9b4e631-47c7-4bed-8814-d9e11638a9ae service nova] Acquired lock "refresh_cache-6f0c884b-afb3-4d06-9dd2-f07099c364e0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.386259] env[61962]: DEBUG nova.network.neutron [req-db6e05c5-7337-499f-9d11-7dc92499cba7 req-a9b4e631-47c7-4bed-8814-d9e11638a9ae service nova] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Refreshing network info cache for port ae04c44a-85ea-427f-ac84-cfa45f24d102 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1383.677948] env[61962]: DEBUG nova.network.neutron [req-db6e05c5-7337-499f-9d11-7dc92499cba7 req-a9b4e631-47c7-4bed-8814-d9e11638a9ae service nova] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Updated VIF entry in instance network info cache for port ae04c44a-85ea-427f-ac84-cfa45f24d102. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1383.678401] env[61962]: DEBUG nova.network.neutron [req-db6e05c5-7337-499f-9d11-7dc92499cba7 req-a9b4e631-47c7-4bed-8814-d9e11638a9ae service nova] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Updating instance_info_cache with network_info: [{"id": "ae04c44a-85ea-427f-ac84-cfa45f24d102", "address": "fa:16:3e:fa:9f:ba", "network": {"id": "e7c201f8-42a9-48f2-a499-92bcb4866e38", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-63616492-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a31ea93dd586435ebb82c1dbfd14c2ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae04c44a-85", "ovs_interfaceid": "ae04c44a-85ea-427f-ac84-cfa45f24d102", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.688876] env[61962]: DEBUG oslo_concurrency.lockutils [req-db6e05c5-7337-499f-9d11-7dc92499cba7 req-a9b4e631-47c7-4bed-8814-d9e11638a9ae service nova] Releasing lock "refresh_cache-6f0c884b-afb3-4d06-9dd2-f07099c364e0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.224984] env[61962]: DEBUG oslo_concurrency.lockutils [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.158668] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1405.174019] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.174019] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.174019] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.174019] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1405.174019] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28318a98-2d92-4080-89a7-33c6799074bc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.182823] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edecd9c3-529c-432b-b1f7-53ab993f863d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.199114] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6feca1c-2694-4f88-929d-04b2eaa189d1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.206617] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7900235-2133-4457-8436-f757aff77acb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.237076] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180556MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1405.237250] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.237482] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.317617] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1405.317773] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1405.317899] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8ecd73a7-7f23-4994-85b9-479584831687 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1405.318032] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1405.318157] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1405.318275] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d970d822-12ab-43cc-8d34-abc6eff8a68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1405.318392] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1405.318508] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1405.318697] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1405.318867] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1405.334883] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5b261fba-bbdf-4ba9-a065-e833f6e44ba2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1405.345827] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4453ec6d-57f7-4ae1-bafa-4dd993c22d03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1405.374027] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance be426e8c-aac5-4510-a9e2-20cc79a00cae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1405.384865] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e62eace3-3320-441e-becb-abff7cdd6660 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1405.395537] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3d2749f8-18d2-4631-adc2-093988125eb4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1405.405486] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1035139f-2193-4d1c-a220-369ef6096ae0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1405.415371] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53048279-c3ea-484f-9a3e-049b12892f7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1405.425066] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4de50eaf-ee14-4aee-b5c0-d08dc5fc8010 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1405.425290] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1405.425454] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '60', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '2', 'io_workload': '10', 'num_proj_3285f5bbd10d495e852d0ef0c0047847': '1', 'num_proj_8e79e89d66b44fa092d586334732c0be': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_99c377cc8b5545a2b6e712291b14cb04': '1', 'num_proj_696cc1bc78cb4511a76f788d82895fef': '1', 'num_proj_a9c2add945d24a299e5bb0fe543deec3': '1', 'num_proj_e816704de44649099e96127498eed7e9': '1', 'num_proj_eaa1993aea5f4f039f35b32e2321e30f': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1405.624763] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235dcb47-d1df-4e6a-b41b-1f9c7e2208e7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.632713] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1ffc4f-d3af-49b4-a33f-9c39ee929a9c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.664145] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c856e7ae-9318-4c15-a37c-a143dd39fbe6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.671709] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e93965-ae32-4a77-92e4-50906a623e59 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.684996] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1405.693607] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1405.707536] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1405.707536] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.470s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.707612] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1406.708157] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1408.154753] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1408.707083] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquiring lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.707339] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.159612] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.160029] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1412.160029] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1412.185481] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1412.185666] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1412.185768] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1412.185899] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1412.186034] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1412.186160] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1412.186316] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1412.186400] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1412.186519] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1412.186635] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1412.186754] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
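Stepping back to the "Inventory has not changed" record a few lines above: placement's effective capacity per resource class is (total - reserved) * allocation_ratio, which is why 48 physical cores can back up to 192 VCPU allocations here. A quick check of those numbers in plain Python, no Nova imports needed:

def effective_capacity(inv):
    """Schedulable units: (total - reserved) * allocation_ratio."""
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

# Values copied from the provider inventory logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    print(rc, effective_capacity(inv))
# VCPU 192, MEMORY_MB 196078, DISK_GB 200 -- so the 10 VCPUs, 1280 MB and
# 10 GB allocated in the final resource view consume only a small
# fraction of capacity.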
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1412.187336] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.159052] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.159052] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.159052] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances with incomplete migration {{(pid=61962) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1415.168238] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1417.158565] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.158699] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.159037] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.159133] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 1418.173157] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] There are 0 instances to clean {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1420.159146] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1421.308741] env[61962]: DEBUG oslo_concurrency.lockutils [None req-86bf0892-b0db-4db9-87c8-1ce8e652eaff tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Acquiring lock "d9b93c9e-bc68-4d00-bd14-031dd4b61919" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.309114] env[61962]: DEBUG oslo_concurrency.lockutils [None req-86bf0892-b0db-4db9-87c8-1ce8e652eaff tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Lock "d9b93c9e-bc68-4d00-bd14-031dd4b61919" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.447495] env[61962]: WARNING oslo_vmware.rw_handles [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1423.447495] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1423.447495] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1423.447495] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1423.447495] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1423.447495] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1423.447495] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1423.447495] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1423.447495] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1423.447495] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1423.447495] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1423.447495] env[61962]: ERROR oslo_vmware.rw_handles [ 1423.448276] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/3403cac4-06b4-4fcd-9a66-c0c13ef50e54/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1423.449802] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1423.450079] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Copying Virtual Disk [datastore2] vmware_temp/3403cac4-06b4-4fcd-9a66-c0c13ef50e54/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] 
vmware_temp/3403cac4-06b4-4fcd-9a66-c0c13ef50e54/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1423.450397] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f04586e-b133-4a05-9ded-6556dfd36ddd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.459471] env[61962]: DEBUG oslo_vmware.api [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for the task: (returnval){ [ 1423.459471] env[61962]: value = "task-4892036" [ 1423.459471] env[61962]: _type = "Task" [ 1423.459471] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.467775] env[61962]: DEBUG oslo_vmware.api [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Task: {'id': task-4892036, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.970454] env[61962]: DEBUG oslo_vmware.exceptions [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1423.970710] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.971325] env[61962]: ERROR nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1423.971325] env[61962]: Faults: ['InvalidArgument'] [ 1423.971325] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Traceback (most recent call last): [ 1423.971325] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1423.971325] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] yield resources [ 1423.971325] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1423.971325] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] self.driver.spawn(context, instance, image_meta, [ 1423.971325] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1423.971325] env[61962]: ERROR nova.compute.manager 
[instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1423.971325] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1423.971325] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] self._fetch_image_if_missing(context, vi) [ 1423.971325] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] image_cache(vi, tmp_image_ds_loc) [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] vm_util.copy_virtual_disk( [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] session._wait_for_task(vmdk_copy_task) [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] return self.wait_for_task(task_ref) [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] return evt.wait() [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] result = hub.switch() [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1423.971664] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] return self.greenlet.switch() [ 1423.971980] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1423.971980] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] self.f(*self.args, **self.kw) [ 1423.971980] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1423.971980] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] raise exceptions.translate_fault(task_info.error) [ 1423.971980] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1423.971980] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Faults: ['InvalidArgument'] [ 1423.971980] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] [ 1423.971980] env[61962]: INFO nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Terminating instance [ 1423.973426] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.973772] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1423.974352] env[61962]: DEBUG nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1423.974545] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1423.974770] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f73850ba-5e33-4107-85a7-6e4c0c9d90b4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.977168] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037433cb-6068-4dec-941a-859937d76fc5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.984918] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1423.985190] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edaca2ee-b4dc-4ec4-a1c7-0fee76588199 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.987563] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Created directory with path 
[datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1423.987731] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1423.988731] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7436f40b-b3f8-4441-b15a-4979e516b878 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.994795] env[61962]: DEBUG oslo_vmware.api [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Waiting for the task: (returnval){ [ 1423.994795] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]521554e4-62ed-82ec-cdd7-658fbe7cae40" [ 1423.994795] env[61962]: _type = "Task" [ 1423.994795] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.002908] env[61962]: DEBUG oslo_vmware.api [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]521554e4-62ed-82ec-cdd7-658fbe7cae40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.059429] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1424.059571] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1424.059761] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Deleting the datastore file [datastore2] 454eebcc-ea18-4046-ba2e-01de1c8a954a {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1424.059932] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1398418-9498-4285-bb74-e33949093647 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.066468] env[61962]: DEBUG oslo_vmware.api [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for the task: (returnval){ [ 1424.066468] env[61962]: value = "task-4892038" [ 1424.066468] env[61962]: _type = "Task" [ 1424.066468] env[61962]: } to complete. 
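The "Waiting for the task ... to complete" and "progress is 0%" records come from a poll-until-done loop around the vSphere task. oslo.vmware vendors its own looping call; the sketch below uses the public oslo.service equivalent to show the same pattern, and session.get_task_info is an assumed stand-in for the real property read.

from oslo_service import loopingcall

def wait_for_task(session, task_ref, interval=0.5):
    def _poll():
        info = session.get_task_info(task_ref)  # assumed helper
        if info.state == 'success':
            raise loopingcall.LoopingCallDone(info.result)
        if info.state == 'error':
            # oslo.vmware raises exceptions.translate_fault(...) here,
            # which is what surfaces the InvalidArgument fault below.
            raise RuntimeError(info.error)
        # 'queued'/'running': keep polling, like the "progress is 0%" lines

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()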
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.075839] env[61962]: DEBUG oslo_vmware.api [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Task: {'id': task-4892038, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.505893] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1424.506229] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Creating directory with path [datastore2] vmware_temp/16fbdd88-cab8-47bf-a247-849d34708988/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1424.506478] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb63725b-2eda-473c-90ef-ecdd01e7c421 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.519699] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Created directory with path [datastore2] vmware_temp/16fbdd88-cab8-47bf-a247-849d34708988/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1424.519904] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Fetch image to [datastore2] vmware_temp/16fbdd88-cab8-47bf-a247-849d34708988/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1424.520088] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/16fbdd88-cab8-47bf-a247-849d34708988/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1424.520856] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91acd33-63d2-41ce-bf65-39aa6bee6705 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.528522] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfac1a4d-e552-4b31-a32d-4e1a814b5564 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.537901] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1673cee-06b7-465a-b922-e1c65d4b8fb5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.571945] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468585de-9c69-4ad0-8874-fefbb02b4b5e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.579418] env[61962]: DEBUG oslo_vmware.api [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Task: {'id': task-4892038, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088464} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.580838] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1424.581037] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1424.581216] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1424.581391] env[61962]: INFO nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Took 0.61 seconds to destroy the instance on the hypervisor. 
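Two requests interleave above: while req-e1d514b2 tears down the failed instance, req-f8f4fbcf has acquired the image-cache lock for the same VMDK and begins its own fetch. That serialization is the per-image lock visible in the "Releasing lock"/"Acquired lock" records; a minimal sketch of the pattern with oslo.concurrency, where is_cached and fetch_and_cache are illustrative placeholders:

from oslo_concurrency import lockutils

def fetch_image_if_missing(datastore, image_id, is_cached, fetch_and_cache):
    # Lock name mirrors the log:
    # "[datastore2] devstack-image-cache_base/<image>/<image>.vmdk"
    name = ('[%s] devstack-image-cache_base/%s/%s.vmdk'
            % (datastore, image_id, image_id))
    with lockutils.lock(name):
        if not is_cached(image_id):
            fetch_and_cache(image_id)  # download, then CopyVirtualDisk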
[ 1424.583226] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7ce562d7-0e4e-4cf7-bb23-12b6b91a434d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.585099] env[61962]: DEBUG nova.compute.claims [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1424.585274] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.585486] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.678753] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1424.848433] env[61962]: DEBUG oslo_vmware.rw_handles [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/16fbdd88-cab8-47bf-a247-849d34708988/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1424.910793] env[61962]: DEBUG oslo_vmware.rw_handles [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1424.911080] env[61962]: DEBUG oslo_vmware.rw_handles [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/16fbdd88-cab8-47bf-a247-849d34708988/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1424.923831] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c188681c-41cf-4964-8808-a90062721619 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.932614] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbe5af5-246a-406b-aeec-6b9bfdf4ab2e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.964498] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cb56aa-c672-4c5d-8b04-e5b3fbeb0516 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.972620] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48315ce6-15a6-413f-927d-80bccbe8045a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.987225] env[61962]: DEBUG nova.compute.provider_tree [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1424.996940] env[61962]: DEBUG nova.scheduler.client.report [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1425.014358] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.429s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.014925] env[61962]: ERROR nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1425.014925] env[61962]: Faults: ['InvalidArgument'] [ 1425.014925] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Traceback (most recent call last): [ 1425.014925] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1425.014925] env[61962]: 
ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] self.driver.spawn(context, instance, image_meta, [ 1425.014925] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1425.014925] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1425.014925] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1425.014925] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] self._fetch_image_if_missing(context, vi) [ 1425.014925] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1425.014925] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] image_cache(vi, tmp_image_ds_loc) [ 1425.014925] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] vm_util.copy_virtual_disk( [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] session._wait_for_task(vmdk_copy_task) [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] return self.wait_for_task(task_ref) [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] return evt.wait() [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] result = hub.switch() [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] return self.greenlet.switch() [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1425.015264] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] self.f(*self.args, **self.kw) [ 1425.015616] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1425.015616] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] raise exceptions.translate_fault(task_info.error) [ 1425.015616] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1425.015616] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Faults: ['InvalidArgument'] [ 1425.015616] env[61962]: ERROR nova.compute.manager [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] [ 1425.015737] env[61962]: DEBUG nova.compute.utils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1425.017257] env[61962]: DEBUG nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Build of instance 454eebcc-ea18-4046-ba2e-01de1c8a954a was re-scheduled: A specified parameter was not correct: fileType [ 1425.017257] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1425.017631] env[61962]: DEBUG nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1425.017802] env[61962]: DEBUG nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1425.017971] env[61962]: DEBUG nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1425.018152] env[61962]: DEBUG nova.network.neutron [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1425.370252] env[61962]: DEBUG nova.network.neutron [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.389991] env[61962]: INFO nova.compute.manager [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Took 0.37 seconds to deallocate network for instance. [ 1425.508450] env[61962]: INFO nova.scheduler.client.report [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Deleted allocations for instance 454eebcc-ea18-4046-ba2e-01de1c8a954a [ 1425.539695] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e1d514b2-824e-487e-a10a-f30e148f321b tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "454eebcc-ea18-4046-ba2e-01de1c8a954a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.360s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.539695] env[61962]: DEBUG oslo_concurrency.lockutils [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "454eebcc-ea18-4046-ba2e-01de1c8a954a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.539695] env[61962]: DEBUG oslo_concurrency.lockutils [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "454eebcc-ea18-4046-ba2e-01de1c8a954a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.539924] env[61962]: DEBUG oslo_concurrency.lockutils [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "454eebcc-ea18-4046-ba2e-01de1c8a954a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.539924] env[61962]: DEBUG oslo_concurrency.lockutils [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "454eebcc-ea18-4046-ba2e-01de1c8a954a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.540889] env[61962]: INFO nova.compute.manager [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Terminating instance [ 1425.543807] env[61962]: DEBUG nova.compute.manager [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1425.543995] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1425.544273] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2d755ae-3daa-4fc6-9d84-f46f3bd655bc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.557070] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb621ce-18d6-46b0-bb5a-cf0568d92bfb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.572809] env[61962]: DEBUG nova.compute.manager [None req-792ae9a9-0dda-4d94-b11f-0fec504684c6 tempest-ServerRescueNegativeTestJSON-1391422182 tempest-ServerRescueNegativeTestJSON-1391422182-project-member] [instance: 2d9a9155-1dbb-4984-99dd-d40552eef7db] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1425.589803] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 454eebcc-ea18-4046-ba2e-01de1c8a954a could not be found. [ 1425.590013] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1425.590498] env[61962]: INFO nova.compute.manager [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Took 0.05 seconds to destroy the instance on the hypervisor. 
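The second terminate above finds no VM on the backend and still reports "Instance destroyed": InstanceNotFound is swallowed so that network and allocation cleanup proceed. A minimal sketch of that tolerant destroy path, with a local exception class standing in for nova.exception.InstanceNotFound and illustrative helpers:

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy(find_vm_ref, unregister_vm, instance):
    try:
        unregister_vm(find_vm_ref(instance))
    except InstanceNotFound:
        # "Instance does not exist on backend" -- nothing to tear down,
        # but the destroy still counts as complete so cleanup runs.
        pass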
[ 1425.590498] env[61962]: DEBUG oslo.service.loopingcall [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1425.591634] env[61962]: DEBUG nova.compute.manager [-] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1425.591634] env[61962]: DEBUG nova.network.neutron [-] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1425.604325] env[61962]: DEBUG nova.compute.manager [None req-792ae9a9-0dda-4d94-b11f-0fec504684c6 tempest-ServerRescueNegativeTestJSON-1391422182 tempest-ServerRescueNegativeTestJSON-1391422182-project-member] [instance: 2d9a9155-1dbb-4984-99dd-d40552eef7db] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1425.620571] env[61962]: DEBUG nova.network.neutron [-] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.633189] env[61962]: DEBUG oslo_concurrency.lockutils [None req-792ae9a9-0dda-4d94-b11f-0fec504684c6 tempest-ServerRescueNegativeTestJSON-1391422182 tempest-ServerRescueNegativeTestJSON-1391422182-project-member] Lock "2d9a9155-1dbb-4984-99dd-d40552eef7db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 227.822s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.639137] env[61962]: INFO nova.compute.manager [-] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] Took 0.05 seconds to deallocate network for instance. [ 1425.660243] env[61962]: DEBUG nova.compute.manager [None req-59aee293-c4f4-48db-95b3-8f2c81fcf69f tempest-ServerRescueNegativeTestJSON-1391422182 tempest-ServerRescueNegativeTestJSON-1391422182-project-member] [instance: a2209177-23ac-4367-9736-2978ffd6fb39] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1425.693515] env[61962]: DEBUG nova.compute.manager [None req-59aee293-c4f4-48db-95b3-8f2c81fcf69f tempest-ServerRescueNegativeTestJSON-1391422182 tempest-ServerRescueNegativeTestJSON-1391422182-project-member] [instance: a2209177-23ac-4367-9736-2978ffd6fb39] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1425.719864] env[61962]: DEBUG oslo_concurrency.lockutils [None req-59aee293-c4f4-48db-95b3-8f2c81fcf69f tempest-ServerRescueNegativeTestJSON-1391422182 tempest-ServerRescueNegativeTestJSON-1391422182-project-member] Lock "a2209177-23ac-4367-9736-2978ffd6fb39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 227.439s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.734634] env[61962]: DEBUG nova.compute.manager [None req-9eb76157-2b61-4f0b-aec0-84ea1a13fdbd tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 5b261fba-bbdf-4ba9-a065-e833f6e44ba2] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1425.770231] env[61962]: DEBUG nova.compute.manager [None req-9eb76157-2b61-4f0b-aec0-84ea1a13fdbd tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 5b261fba-bbdf-4ba9-a065-e833f6e44ba2] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1425.781909] env[61962]: DEBUG oslo_concurrency.lockutils [None req-970286b6-8e70-4710-b1f2-9dd9b0fe75ad tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "454eebcc-ea18-4046-ba2e-01de1c8a954a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.243s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.782073] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "454eebcc-ea18-4046-ba2e-01de1c8a954a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 295.265s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.782289] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 454eebcc-ea18-4046-ba2e-01de1c8a954a] During sync_power_state the instance has a pending task (deleting). Skip. 
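The "pending task (deleting). Skip." entry reflects the guard in the periodic power-state sync: when an instance already carries a task_state, the sync yields to whichever operation owns it. A rough, self-contained sketch of that check (attribute and function names follow the log, but this is not the real implementation):

    import logging

    logging.basicConfig(level=logging.INFO)
    LOG = logging.getLogger(__name__)

    class Instance:
        def __init__(self, task_state=None):
            self.task_state = task_state

    def query_driver_power_state_and_sync(instance):
        if instance.task_state is not None:
            # e.g. a terminate is in flight; the sync must not fight it
            LOG.info('During sync_power_state the instance has a pending '
                     'task (%s). Skip.', instance.task_state)
            return
        # ...otherwise reconcile the driver power state with the DB record...

    query_driver_power_state_and_sync(Instance(task_state='deleting'))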
[ 1425.782583] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "454eebcc-ea18-4046-ba2e-01de1c8a954a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.799547] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9eb76157-2b61-4f0b-aec0-84ea1a13fdbd tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "5b261fba-bbdf-4ba9-a065-e833f6e44ba2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 215.290s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.812237] env[61962]: DEBUG nova.compute.manager [None req-daf488bf-6beb-4599-adaa-f153a1040e49 tempest-InstanceActionsNegativeTestJSON-360330678 tempest-InstanceActionsNegativeTestJSON-360330678-project-member] [instance: 4453ec6d-57f7-4ae1-bafa-4dd993c22d03] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1425.836526] env[61962]: DEBUG nova.compute.manager [None req-daf488bf-6beb-4599-adaa-f153a1040e49 tempest-InstanceActionsNegativeTestJSON-360330678 tempest-InstanceActionsNegativeTestJSON-360330678-project-member] [instance: 4453ec6d-57f7-4ae1-bafa-4dd993c22d03] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1425.859226] env[61962]: DEBUG oslo_concurrency.lockutils [None req-daf488bf-6beb-4599-adaa-f153a1040e49 tempest-InstanceActionsNegativeTestJSON-360330678 tempest-InstanceActionsNegativeTestJSON-360330678-project-member] Lock "4453ec6d-57f7-4ae1-bafa-4dd993c22d03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.928s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.868913] env[61962]: DEBUG nova.compute.manager [None req-804cf4ae-423f-40bf-ba35-ecefab5e1417 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] [instance: be426e8c-aac5-4510-a9e2-20cc79a00cae] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1425.921414] env[61962]: DEBUG nova.compute.manager [None req-804cf4ae-423f-40bf-ba35-ecefab5e1417 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] [instance: be426e8c-aac5-4510-a9e2-20cc79a00cae] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1425.946902] env[61962]: DEBUG oslo_concurrency.lockutils [None req-804cf4ae-423f-40bf-ba35-ecefab5e1417 tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Lock "be426e8c-aac5-4510-a9e2-20cc79a00cae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 200.733s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.960753] env[61962]: DEBUG nova.compute.manager [None req-e8b3351b-9bef-4d51-80fa-bf03ad584b12 tempest-ServersNegativeTestJSON-1349715905 tempest-ServersNegativeTestJSON-1349715905-project-member] [instance: e62eace3-3320-441e-becb-abff7cdd6660] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1425.986770] env[61962]: DEBUG nova.compute.manager [None req-e8b3351b-9bef-4d51-80fa-bf03ad584b12 tempest-ServersNegativeTestJSON-1349715905 tempest-ServersNegativeTestJSON-1349715905-project-member] [instance: e62eace3-3320-441e-becb-abff7cdd6660] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1426.011174] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e8b3351b-9bef-4d51-80fa-bf03ad584b12 tempest-ServersNegativeTestJSON-1349715905 tempest-ServersNegativeTestJSON-1349715905-project-member] Lock "e62eace3-3320-441e-becb-abff7cdd6660" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 197.461s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.022505] env[61962]: DEBUG nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1426.089249] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.089655] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.092608] env[61962]: INFO nova.compute.claims [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1426.433325] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f8cdc3-1446-4e92-a1f8-6386c1610775 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.440921] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbad8c3a-3c85-4a16-950e-d4aae3d1f89d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.483024] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9ea2e2-342b-48aa-a91e-b6f043f3ab60 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.489633] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54ee445-351e-490f-8b55-216fd8834864 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.509880] env[61962]: DEBUG nova.compute.provider_tree [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1426.524019] env[61962]: DEBUG nova.scheduler.client.report [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1426.543223] env[61962]: DEBUG oslo_concurrency.lockutils [None 
req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.454s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.543765] env[61962]: DEBUG nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1426.585710] env[61962]: DEBUG nova.compute.utils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1426.587459] env[61962]: DEBUG nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1426.587747] env[61962]: DEBUG nova.network.neutron [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1426.598551] env[61962]: DEBUG nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1426.671245] env[61962]: DEBUG nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Start spawning the instance on the hypervisor. 
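The nova.virt.hardware entries that follow walk a CPU topology search: with no flavor or image limits (preferred 0:0:0, maxima of 65536 each), a 1-vCPU guest admits exactly one (sockets, cores, threads) triple. A simplified sketch of that enumeration (Nova's _get_possible_cpu_topologies handles limits and preferences more carefully):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate triples with sockets * cores * threads == vcpus.
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- "Got 1 possible topologies"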
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1426.694035] env[61962]: DEBUG nova.policy [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8eef4e009bb1468b91fddf035ce8ee5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34477c98bf9c4a87971a62cb96b072c7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1426.704837] env[61962]: DEBUG nova.virt.hardware [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1426.705067] env[61962]: DEBUG nova.virt.hardware [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1426.705226] env[61962]: DEBUG nova.virt.hardware [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1426.705408] env[61962]: DEBUG nova.virt.hardware [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1426.705550] env[61962]: DEBUG nova.virt.hardware [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1426.705695] env[61962]: DEBUG nova.virt.hardware [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1426.705900] env[61962]: DEBUG 
nova.virt.hardware [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1426.706117] env[61962]: DEBUG nova.virt.hardware [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1426.706282] env[61962]: DEBUG nova.virt.hardware [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1426.706446] env[61962]: DEBUG nova.virt.hardware [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1426.706624] env[61962]: DEBUG nova.virt.hardware [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1426.707745] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f459fbc-59e4-4c34-b015-442db34b5507 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.716010] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbb520a-af66-4c21-829a-33e2b1418094 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.171406] env[61962]: DEBUG nova.network.neutron [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Successfully created port: 5d003f22-c89e-4bee-b760-764ae5e48397 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1427.808724] env[61962]: DEBUG nova.compute.manager [req-71750656-9105-4509-884d-c2f976bf47e3 req-470b08d8-d55d-4b79-aec1-f934dfe857ec service nova] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Received event network-vif-plugged-5d003f22-c89e-4bee-b760-764ae5e48397 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1427.808954] env[61962]: DEBUG oslo_concurrency.lockutils [req-71750656-9105-4509-884d-c2f976bf47e3 req-470b08d8-d55d-4b79-aec1-f934dfe857ec service nova] Acquiring lock "3d2749f8-18d2-4631-adc2-093988125eb4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.809073] env[61962]: DEBUG oslo_concurrency.lockutils [req-71750656-9105-4509-884d-c2f976bf47e3 
req-470b08d8-d55d-4b79-aec1-f934dfe857ec service nova] Lock "3d2749f8-18d2-4631-adc2-093988125eb4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1427.809195] env[61962]: DEBUG oslo_concurrency.lockutils [req-71750656-9105-4509-884d-c2f976bf47e3 req-470b08d8-d55d-4b79-aec1-f934dfe857ec service nova] Lock "3d2749f8-18d2-4631-adc2-093988125eb4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.809343] env[61962]: DEBUG nova.compute.manager [req-71750656-9105-4509-884d-c2f976bf47e3 req-470b08d8-d55d-4b79-aec1-f934dfe857ec service nova] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] No waiting events found dispatching network-vif-plugged-5d003f22-c89e-4bee-b760-764ae5e48397 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1427.809512] env[61962]: WARNING nova.compute.manager [req-71750656-9105-4509-884d-c2f976bf47e3 req-470b08d8-d55d-4b79-aec1-f934dfe857ec service nova] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Received unexpected event network-vif-plugged-5d003f22-c89e-4bee-b760-764ae5e48397 for instance with vm_state building and task_state spawning. [ 1427.958442] env[61962]: DEBUG nova.network.neutron [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Successfully updated port: 5d003f22-c89e-4bee-b760-764ae5e48397 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1427.971284] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Acquiring lock "refresh_cache-3d2749f8-18d2-4631-adc2-093988125eb4" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1427.971871] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Acquired lock "refresh_cache-3d2749f8-18d2-4631-adc2-093988125eb4" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.972046] env[61962]: DEBUG nova.network.neutron [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1428.261208] env[61962]: DEBUG nova.network.neutron [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Instance cache missing network info. 
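The "-events" lock and "No waiting events found dispatching network-vif-plugged-..." lines show the external-event handshake: a spawn path may register a waiter for a named event, and the Neutron-triggered callback pops and signals it; here nobody was waiting yet, so the event is logged as unexpected. A hedged sketch of the idea (not Nova's InstanceEvents class):

    import threading

    _waiters = {}  # event name -> threading.Event

    def prepare_for_event(name):
        ev = _waiters[name] = threading.Event()
        return ev  # the waiting side blocks on ev.wait(timeout=...)

    def pop_instance_event(name):
        ev = _waiters.pop(name, None)
        if ev is None:
            # Mirrors "No waiting events found dispatching <name>"
            return False
        ev.set()
        return True

    pop_instance_event('network-vif-plugged-5d003f22-c89e-4bee-b760-764ae5e48397')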
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1428.474620] env[61962]: DEBUG nova.network.neutron [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Updating instance_info_cache with network_info: [{"id": "5d003f22-c89e-4bee-b760-764ae5e48397", "address": "fa:16:3e:cf:7e:d9", "network": {"id": "7126a13b-1d17-46a1-aa40-588932e7b5b0", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2071951343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34477c98bf9c4a87971a62cb96b072c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cba18f15-a919-422e-a423-1e705e233389", "external-id": "nsx-vlan-transportzone-79", "segmentation_id": 79, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d003f22-c8", "ovs_interfaceid": "5d003f22-c89e-4bee-b760-764ae5e48397", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.485784] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Releasing lock "refresh_cache-3d2749f8-18d2-4631-adc2-093988125eb4" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.486077] env[61962]: DEBUG nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Instance network_info: |[{"id": "5d003f22-c89e-4bee-b760-764ae5e48397", "address": "fa:16:3e:cf:7e:d9", "network": {"id": "7126a13b-1d17-46a1-aa40-588932e7b5b0", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2071951343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34477c98bf9c4a87971a62cb96b072c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cba18f15-a919-422e-a423-1e705e233389", "external-id": "nsx-vlan-transportzone-79", "segmentation_id": 79, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d003f22-c8", "ovs_interfaceid": "5d003f22-c89e-4bee-b760-764ae5e48397", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1428.486473] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:7e:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cba18f15-a919-422e-a423-1e705e233389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d003f22-c89e-4bee-b760-764ae5e48397', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1428.493807] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Creating folder: Project (34477c98bf9c4a87971a62cb96b072c7). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1428.494330] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7688d70c-dc23-4a00-be8c-5ccd193e46ac {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.507044] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Created folder: Project (34477c98bf9c4a87971a62cb96b072c7) in parent group-v953327. [ 1428.507171] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Creating folder: Instances. Parent ref: group-v953407. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1428.507416] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70585f20-f322-4034-a966-0ff7668c3739 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.516646] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Created folder: Instances in parent group-v953407. [ 1428.516960] env[61962]: DEBUG oslo.service.loopingcall [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1428.517205] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1428.517451] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-092b1722-791d-4388-bb3f-9a612ae542fe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.539592] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1428.539592] env[61962]: value = "task-4892041" [ 1428.539592] env[61962]: _type = "Task" [ 1428.539592] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.548547] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892041, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.050124] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892041, 'name': CreateVM_Task, 'duration_secs': 0.345908} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.050632] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1429.051514] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.051514] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.051675] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1429.051977] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6dab7fd-15ed-400f-a19b-fcb2868815c4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.058529] env[61962]: DEBUG oslo_vmware.api [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Waiting for the task: (returnval){ [ 1429.058529] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]525cd9bd-2ae2-19f5-7da6-cdef489fdc71" [ 1429.058529] env[61962]: _type = "Task" [ 1429.058529] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.068269] env[61962]: DEBUG oslo_vmware.api [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]525cd9bd-2ae2-19f5-7da6-cdef489fdc71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.570863] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.571451] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1429.571990] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.836765] env[61962]: DEBUG nova.compute.manager [req-eb10d2e1-f7e2-4de1-ad5c-682c1a154e94 req-f669ee5a-8708-4414-9014-238d1e856c0c service nova] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Received event network-changed-5d003f22-c89e-4bee-b760-764ae5e48397 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1429.836765] env[61962]: DEBUG nova.compute.manager [req-eb10d2e1-f7e2-4de1-ad5c-682c1a154e94 req-f669ee5a-8708-4414-9014-238d1e856c0c service nova] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Refreshing instance network info cache due to event network-changed-5d003f22-c89e-4bee-b760-764ae5e48397. 
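The CreateVM_Task and SearchDatastore_Task entries above show oslo.vmware's polling model: the task is sampled ("progress is 0%.") until it reports success or error. A generic sketch of such a loop (the real wait_for_task adds a looping call, progress callbacks and richer fault handling):

    import time

    def wait_for_task(poll, interval=0.5):
        # poll() is assumed to return a dict like {'state': ..., 'error': ...}.
        while True:
            info = poll()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error'))
            time.sleep(interval)

    # Toy usage: one 'running' sample, then success.
    samples = iter([{'state': 'running'}, {'state': 'success'}])
    wait_for_task(lambda: next(samples), interval=0)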
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1429.836765] env[61962]: DEBUG oslo_concurrency.lockutils [req-eb10d2e1-f7e2-4de1-ad5c-682c1a154e94 req-f669ee5a-8708-4414-9014-238d1e856c0c service nova] Acquiring lock "refresh_cache-3d2749f8-18d2-4631-adc2-093988125eb4" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.836765] env[61962]: DEBUG oslo_concurrency.lockutils [req-eb10d2e1-f7e2-4de1-ad5c-682c1a154e94 req-f669ee5a-8708-4414-9014-238d1e856c0c service nova] Acquired lock "refresh_cache-3d2749f8-18d2-4631-adc2-093988125eb4" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.836765] env[61962]: DEBUG nova.network.neutron [req-eb10d2e1-f7e2-4de1-ad5c-682c1a154e94 req-f669ee5a-8708-4414-9014-238d1e856c0c service nova] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Refreshing network info cache for port 5d003f22-c89e-4bee-b760-764ae5e48397 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1430.141096] env[61962]: DEBUG nova.network.neutron [req-eb10d2e1-f7e2-4de1-ad5c-682c1a154e94 req-f669ee5a-8708-4414-9014-238d1e856c0c service nova] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Updated VIF entry in instance network info cache for port 5d003f22-c89e-4bee-b760-764ae5e48397. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1430.141454] env[61962]: DEBUG nova.network.neutron [req-eb10d2e1-f7e2-4de1-ad5c-682c1a154e94 req-f669ee5a-8708-4414-9014-238d1e856c0c service nova] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Updating instance_info_cache with network_info: [{"id": "5d003f22-c89e-4bee-b760-764ae5e48397", "address": "fa:16:3e:cf:7e:d9", "network": {"id": "7126a13b-1d17-46a1-aa40-588932e7b5b0", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2071951343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34477c98bf9c4a87971a62cb96b072c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cba18f15-a919-422e-a423-1e705e233389", "external-id": "nsx-vlan-transportzone-79", "segmentation_id": 79, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d003f22-c8", "ovs_interfaceid": "5d003f22-c89e-4bee-b760-764ae5e48397", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.151412] env[61962]: DEBUG oslo_concurrency.lockutils [req-eb10d2e1-f7e2-4de1-ad5c-682c1a154e94 req-f669ee5a-8708-4414-9014-238d1e856c0c service nova] Releasing lock "refresh_cache-3d2749f8-18d2-4631-adc2-093988125eb4" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.248491] env[61962]: DEBUG oslo_concurrency.lockutils [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] 
Acquiring lock "3d2749f8-18d2-4631-adc2-093988125eb4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.770998] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.771336] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.124579] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1454.124579] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.478189] env[61962]: DEBUG oslo_concurrency.lockutils [None req-094311a2-9f80-4d6b-9521-9fcfd74b3cac tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "c3514756-a861-4662-b859-0881ced78e19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1454.478419] env[61962]: DEBUG oslo_concurrency.lockutils [None req-094311a2-9f80-4d6b-9521-9fcfd74b3cac tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "c3514756-a861-4662-b859-0881ced78e19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.166334] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.178375] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.178589] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.178803] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.178977] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1466.180262] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebb3398-f583-4e4e-95b3-a058f1d58d79 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.189462] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7eea1c-1a24-41e3-a4e3-dc45da2f6d9c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.203644] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b532fe14-46ee-45a6-9119-5a7d7a4097ef {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.210382] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9df506e-8346-485e-b540-f0d65eee2bf7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.241149] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180542MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1466.241301] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.241494] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.388837] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 
actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1466.389126] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 8ecd73a7-7f23-4994-85b9-479584831687 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1466.389352] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1466.389555] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1466.389763] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d970d822-12ab-43cc-8d34-abc6eff8a68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1466.389925] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1466.390074] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1466.390202] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1466.390322] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1466.390439] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3d2749f8-18d2-4631-adc2-093988125eb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1466.401885] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1035139f-2193-4d1c-a220-369ef6096ae0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1466.413066] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53048279-c3ea-484f-9a3e-049b12892f7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1466.424393] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4de50eaf-ee14-4aee-b5c0-d08dc5fc8010 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1466.434908] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1466.444966] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d9b93c9e-bc68-4d00-bd14-031dd4b61919 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1466.454972] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
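The per-instance allocations above make the "Final resource view" entry that follows easy to check by hand, assuming the tracker sums the ten actively managed m1.nano guests plus the 512 MB reserved in the inventory:

    instances = 10                          # num_instances in the stats below
    used_ram = 512 + instances * 128        # reserved + 10 x 128 MB = 1792 MB
    used_disk = instances * 1               # 10 x 1 GB root disk = 10 GB
    used_vcpus = instances * 1              # 10 of 48 vCPUs
    print(used_ram, used_disk, used_vcpus)  # 1792 10 10, matching the log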
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1466.466566] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1466.479546] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3514756-a861-4662-b859-0881ced78e19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1466.479803] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1466.479967] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '67', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_3285f5bbd10d495e852d0ef0c0047847': '1', 'io_workload': '10', 'num_proj_8e79e89d66b44fa092d586334732c0be': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_99c377cc8b5545a2b6e712291b14cb04': '1', 'num_proj_696cc1bc78cb4511a76f788d82895fef': '1', 'num_proj_a9c2add945d24a299e5bb0fe543deec3': '1', 'num_proj_e816704de44649099e96127498eed7e9': '1', 'num_proj_eaa1993aea5f4f039f35b32e2321e30f': '1', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_34477c98bf9c4a87971a62cb96b072c7': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1466.496285] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Refreshing inventories for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1466.511895] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Updating ProviderTree inventory for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1466.511895] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Updating inventory 
in ProviderTree for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1466.522389] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Refreshing aggregate associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, aggregates: None {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1466.541324] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Refreshing trait associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1466.757589] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c4ae42-7ae5-47b6-be2b-d7d44d3de783 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.765662] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc05318-3593-430e-bce2-589ce38ee791 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.796817] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b561804-4fb3-4ec7-9fbe-0a782b011bc0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.805331] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4d9490-f63b-4299-8799-b12b8a89d03e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.820584] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1466.829504] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1466.844781] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1466.844973] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.603s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.837246] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1468.837618] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1469.153657] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1471.154355] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.645444] env[61962]: WARNING oslo_vmware.rw_handles [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1472.645444] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1472.645444] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1472.645444] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1472.645444] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1472.645444] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1472.645444] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1472.645444] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1472.645444] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1472.645444] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1472.645444] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1472.645444] env[61962]: ERROR oslo_vmware.rw_handles [ 1472.646109] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/16fbdd88-cab8-47bf-a247-849d34708988/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk 
on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1472.647958] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1472.648299] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Copying Virtual Disk [datastore2] vmware_temp/16fbdd88-cab8-47bf-a247-849d34708988/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/16fbdd88-cab8-47bf-a247-849d34708988/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1472.648612] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a4126a8-82c6-4adc-8de6-cccc3fbd6df0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.656973] env[61962]: DEBUG oslo_vmware.api [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Waiting for the task: (returnval){ [ 1472.656973] env[61962]: value = "task-4892042" [ 1472.656973] env[61962]: _type = "Task" [ 1472.656973] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.665600] env[61962]: DEBUG oslo_vmware.api [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Task: {'id': task-4892042, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.158513] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.158702] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1473.158847] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1473.179985] env[61962]: DEBUG oslo_vmware.exceptions [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1473.180309] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1473.180928] env[61962]: ERROR nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1473.180928] env[61962]: Faults: ['InvalidArgument'] [ 1473.180928] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Traceback (most recent call last): [ 1473.180928] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1473.180928] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] yield resources [ 1473.180928] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1473.180928] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] self.driver.spawn(context, instance, image_meta, [ 1473.180928] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1473.180928] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1473.180928] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1473.180928] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] self._fetch_image_if_missing(context, vi) [ 1473.180928] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] image_cache(vi, tmp_image_ds_loc) [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] vm_util.copy_virtual_disk( [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] session._wait_for_task(vmdk_copy_task) [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] return self.wait_for_task(task_ref) [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] return evt.wait() [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] result = hub.switch() [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1473.181317] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] return self.greenlet.switch() [ 1473.181632] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1473.181632] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] self.f(*self.args, **self.kw) [ 1473.181632] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1473.181632] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] raise exceptions.translate_fault(task_info.error) [ 1473.181632] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1473.181632] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Faults: ['InvalidArgument'] [ 1473.181632] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] [ 1473.181632] env[61962]: INFO nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Terminating instance [ 1473.182882] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.183149] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1473.183412] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f45bf3a0-c697-4b55-a1fb-05f82dd0a647 {{(pid=61962) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.185834] env[61962]: DEBUG nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1473.186065] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1473.188150] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e17e88-f848-44b6-9bf2-d06e7fbfb2f8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.192710] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1473.192942] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1473.193169] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1473.193392] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1473.193608] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1473.193773] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1473.193942] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1473.194088] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1473.194212] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1473.194330] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1473.194449] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1473.195344] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.200609] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1473.201669] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-efdfe2a0-57a3-4c23-8a9e-e1dacc95d93a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.203221] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1473.203392] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1473.204321] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baf3329e-206c-4b97-b7de-733e56108644 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.209570] env[61962]: DEBUG oslo_vmware.api [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Waiting for the task: (returnval){ [ 1473.209570] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52d247d4-e66c-bcc3-f90b-95067fc8bb4b" [ 1473.209570] env[61962]: _type = "Task" [ 1473.209570] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.217658] env[61962]: DEBUG oslo_vmware.api [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52d247d4-e66c-bcc3-f90b-95067fc8bb4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.285062] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1473.285062] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1473.285062] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Deleting the datastore file [datastore2] e76b5908-3bad-4c19-90e7-b15ca253ad80 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1473.285062] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b8f3c6f-5e86-4aaf-8c0c-d009bea0e221 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.292227] env[61962]: DEBUG oslo_vmware.api [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Waiting for the task: (returnval){ [ 1473.292227] env[61962]: value = "task-4892044" [ 1473.292227] env[61962]: _type = "Task" [ 1473.292227] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.300642] env[61962]: DEBUG oslo_vmware.api [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Task: {'id': task-4892044, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.720560] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1473.720966] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Creating directory with path [datastore2] vmware_temp/ccd95c83-38c9-4bfe-8c36-14ebfa4b6aca/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1473.721101] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e5c3121-0207-4e8a-97ff-d1a0e03e4cd5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.738912] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Created directory with path [datastore2] vmware_temp/ccd95c83-38c9-4bfe-8c36-14ebfa4b6aca/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1473.739647] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Fetch image to [datastore2] vmware_temp/ccd95c83-38c9-4bfe-8c36-14ebfa4b6aca/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1473.739647] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/ccd95c83-38c9-4bfe-8c36-14ebfa4b6aca/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1473.740090] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0d672a-5fa2-4311-a99e-9668a163096c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.747837] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe26289-18ba-4370-9aa9-88c119b60f6b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.759050] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624ba26e-cd0f-477f-a736-6180e1bd066b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.790674] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427aab78-d074-46ba-9682-03abf8659429 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.802436] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-919cd1eb-8523-4da9-8464-91e4f987cfd9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.804175] env[61962]: DEBUG oslo_vmware.api [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Task: {'id': task-4892044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079256} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.804415] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1473.804590] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1473.804758] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1473.804960] env[61962]: INFO nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1473.807135] env[61962]: DEBUG nova.compute.claims [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1473.807322] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.807544] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.826419] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1474.014839] env[61962]: DEBUG oslo_vmware.rw_handles [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ccd95c83-38c9-4bfe-8c36-14ebfa4b6aca/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1474.076895] env[61962]: DEBUG oslo_vmware.rw_handles [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1474.077150] env[61962]: DEBUG oslo_vmware.rw_handles [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ccd95c83-38c9-4bfe-8c36-14ebfa4b6aca/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1474.141043] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70aafa5b-b4b9-415d-84e5-651784f0856b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.149011] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea410007-0e3a-4155-8141-6bc78c5b45f4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.179786] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33250d12-aead-4f69-9101-5125d3474b94 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.187530] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266b8fc5-8298-4797-95ea-995a3f5f372f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.201147] env[61962]: DEBUG nova.compute.provider_tree [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1474.209794] env[61962]: DEBUG nova.scheduler.client.report [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1474.230419] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.423s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.231071] env[61962]: ERROR nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1474.231071] env[61962]: Faults: ['InvalidArgument'] [ 1474.231071] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Traceback (most recent call last): [ 1474.231071] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1474.231071] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] self.driver.spawn(context, instance, image_meta, [ 1474.231071] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1474.231071] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1474.231071] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1474.231071] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] self._fetch_image_if_missing(context, vi) [ 1474.231071] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1474.231071] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] image_cache(vi, tmp_image_ds_loc) [ 1474.231071] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] vm_util.copy_virtual_disk( [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] session._wait_for_task(vmdk_copy_task) [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] return self.wait_for_task(task_ref) [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] return evt.wait() [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] result = hub.switch() [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] return self.greenlet.switch() [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1474.231443] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] self.f(*self.args, **self.kw) [ 1474.231786] env[61962]: ERROR nova.compute.manager [instance: 
e76b5908-3bad-4c19-90e7-b15ca253ad80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1474.231786] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] raise exceptions.translate_fault(task_info.error) [ 1474.231786] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1474.231786] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Faults: ['InvalidArgument'] [ 1474.231786] env[61962]: ERROR nova.compute.manager [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] [ 1474.231932] env[61962]: DEBUG nova.compute.utils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1474.233847] env[61962]: DEBUG nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Build of instance e76b5908-3bad-4c19-90e7-b15ca253ad80 was re-scheduled: A specified parameter was not correct: fileType [ 1474.233847] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1474.234319] env[61962]: DEBUG nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1474.234496] env[61962]: DEBUG nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1474.234692] env[61962]: DEBUG nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1474.234853] env[61962]: DEBUG nova.network.neutron [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1474.607064] env[61962]: DEBUG nova.network.neutron [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.620852] env[61962]: INFO nova.compute.manager [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Took 0.39 seconds to deallocate network for instance. [ 1474.726485] env[61962]: INFO nova.scheduler.client.report [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Deleted allocations for instance e76b5908-3bad-4c19-90e7-b15ca253ad80 [ 1474.747811] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f8f4fbcf-abb6-411e-9491-8d17bed92cc6 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Lock "e76b5908-3bad-4c19-90e7-b15ca253ad80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 676.449s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.748925] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Lock "e76b5908-3bad-4c19-90e7-b15ca253ad80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 479.348s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.749160] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Acquiring lock "e76b5908-3bad-4c19-90e7-b15ca253ad80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.749364] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Lock 
"e76b5908-3bad-4c19-90e7-b15ca253ad80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.749528] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Lock "e76b5908-3bad-4c19-90e7-b15ca253ad80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.751518] env[61962]: INFO nova.compute.manager [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Terminating instance [ 1474.753248] env[61962]: DEBUG nova.compute.manager [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1474.753477] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1474.753959] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a2caab17-6b0e-4695-a67d-509ad5ca27db {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.764309] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bd2839-c363-4381-b8d1-bdd91dc3a024 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.774922] env[61962]: DEBUG nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1474.799444] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e76b5908-3bad-4c19-90e7-b15ca253ad80 could not be found. 
[ 1474.799644] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1474.799865] env[61962]: INFO nova.compute.manager [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1474.800173] env[61962]: DEBUG oslo.service.loopingcall [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1474.800584] env[61962]: DEBUG nova.compute.manager [-] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1474.800709] env[61962]: DEBUG nova.network.neutron [-] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1474.826629] env[61962]: DEBUG nova.network.neutron [-] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.836551] env[61962]: INFO nova.compute.manager [-] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] Took 0.04 seconds to deallocate network for instance. 
[ 1474.849334] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.849584] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.851046] env[61962]: INFO nova.compute.claims [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1474.953433] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d295ccfe-84f2-4576-a0ce-ba7fcc6caa29 tempest-ServerRescueTestJSONUnderV235-1963263010 tempest-ServerRescueTestJSONUnderV235-1963263010-project-member] Lock "e76b5908-3bad-4c19-90e7-b15ca253ad80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.204s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.954348] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "e76b5908-3bad-4c19-90e7-b15ca253ad80" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 344.437s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.954554] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: e76b5908-3bad-4c19-90e7-b15ca253ad80] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1474.954742] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "e76b5908-3bad-4c19-90e7-b15ca253ad80" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.143180] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe0b01c-7200-40b5-9fa0-64c69702f01a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.151220] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9ed5c2-45b7-441a-9a5b-ea48d2ad3984 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.180100] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.181139] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae747cbe-9c8c-489c-a8cf-ee82b975d9fe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.188709] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3472fa08-0b5b-4807-8f96-08885abc92e3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.201917] env[61962]: DEBUG nova.compute.provider_tree [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.210692] env[61962]: DEBUG nova.scheduler.client.report [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1475.225972] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.376s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.226460] env[61962]: DEBUG nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Start building networks 
asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1475.261730] env[61962]: DEBUG nova.compute.utils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1475.264053] env[61962]: DEBUG nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1475.264053] env[61962]: DEBUG nova.network.neutron [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1475.272957] env[61962]: DEBUG nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1475.333133] env[61962]: DEBUG nova.policy [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84e0d67cbeef490f920225baaa7ef0bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e58b68272c404983993a09ad12e8c03b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1475.347290] env[61962]: DEBUG nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1475.376924] env[61962]: DEBUG nova.virt.hardware [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=<?>,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-01T12:09:08Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1475.377275] env[61962]: DEBUG nova.virt.hardware [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1475.377442] env[61962]: DEBUG nova.virt.hardware [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1475.377626] env[61962]: DEBUG nova.virt.hardware [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1475.377774] env[61962]: DEBUG nova.virt.hardware [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1475.377923] env[61962]: DEBUG nova.virt.hardware [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1475.378271] env[61962]: DEBUG nova.virt.hardware [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1475.378449] env[61962]: DEBUG nova.virt.hardware [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1475.378621] env[61962]: DEBUG nova.virt.hardware [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Got 1
possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1475.378786] env[61962]: DEBUG nova.virt.hardware [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1475.378956] env[61962]: DEBUG nova.virt.hardware [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1475.380018] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d917bad-fc97-486d-b600-935c66d56f63 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.389825] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504f06a1-63f7-4d12-9b52-f5941019f28f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.673674] env[61962]: DEBUG nova.network.neutron [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Successfully created port: 166f7db8-1433-4bc9-ab6f-73891a83ebb5 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1476.158061] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1476.483822] env[61962]: DEBUG nova.network.neutron [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Successfully updated port: 166f7db8-1433-4bc9-ab6f-73891a83ebb5 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1476.501920] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "refresh_cache-1035139f-2193-4d1c-a220-369ef6096ae0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.502156] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "refresh_cache-1035139f-2193-4d1c-a220-369ef6096ae0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.502226] env[61962]: DEBUG nova.network.neutron [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1476.579242] env[61962]: DEBUG nova.network.neutron [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 
tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1476.675938] env[61962]: DEBUG nova.compute.manager [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Received event network-vif-plugged-166f7db8-1433-4bc9-ab6f-73891a83ebb5 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1476.676161] env[61962]: DEBUG oslo_concurrency.lockutils [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] Acquiring lock "1035139f-2193-4d1c-a220-369ef6096ae0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.677545] env[61962]: DEBUG oslo_concurrency.lockutils [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] Lock "1035139f-2193-4d1c-a220-369ef6096ae0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.677545] env[61962]: DEBUG oslo_concurrency.lockutils [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] Lock "1035139f-2193-4d1c-a220-369ef6096ae0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.677545] env[61962]: DEBUG nova.compute.manager [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] No waiting events found dispatching network-vif-plugged-166f7db8-1433-4bc9-ab6f-73891a83ebb5 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1476.677545] env[61962]: WARNING nova.compute.manager [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Received unexpected event network-vif-plugged-166f7db8-1433-4bc9-ab6f-73891a83ebb5 for instance with vm_state building and task_state spawning. [ 1476.677731] env[61962]: DEBUG nova.compute.manager [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Received event network-changed-166f7db8-1433-4bc9-ab6f-73891a83ebb5 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1476.677926] env[61962]: DEBUG nova.compute.manager [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Refreshing instance network info cache due to event network-changed-166f7db8-1433-4bc9-ab6f-73891a83ebb5.
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1476.678180] env[61962]: DEBUG oslo_concurrency.lockutils [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] Acquiring lock "refresh_cache-1035139f-2193-4d1c-a220-369ef6096ae0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.788455] env[61962]: DEBUG nova.network.neutron [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Updating instance_info_cache with network_info: [{"id": "166f7db8-1433-4bc9-ab6f-73891a83ebb5", "address": "fa:16:3e:01:4a:73", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap166f7db8-14", "ovs_interfaceid": "166f7db8-1433-4bc9-ab6f-73891a83ebb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.803331] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "refresh_cache-1035139f-2193-4d1c-a220-369ef6096ae0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.803622] env[61962]: DEBUG nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Instance network_info: |[{"id": "166f7db8-1433-4bc9-ab6f-73891a83ebb5", "address": "fa:16:3e:01:4a:73", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap166f7db8-14", 
"ovs_interfaceid": "166f7db8-1433-4bc9-ab6f-73891a83ebb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1476.803941] env[61962]: DEBUG oslo_concurrency.lockutils [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] Acquired lock "refresh_cache-1035139f-2193-4d1c-a220-369ef6096ae0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.804182] env[61962]: DEBUG nova.network.neutron [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Refreshing network info cache for port 166f7db8-1433-4bc9-ab6f-73891a83ebb5 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1476.805760] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:4a:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be8bd197-4b2b-46e7-88ea-2554b0438584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '166f7db8-1433-4bc9-ab6f-73891a83ebb5', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1476.812690] env[61962]: DEBUG oslo.service.loopingcall [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.815942] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1476.816764] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aeed0b6d-6d42-4c2e-82c5-5034a161760a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.838274] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1476.838274] env[61962]: value = "task-4892045" [ 1476.838274] env[61962]: _type = "Task" [ 1476.838274] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.847844] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892045, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.153560] env[61962]: DEBUG nova.network.neutron [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Updated VIF entry in instance network info cache for port 166f7db8-1433-4bc9-ab6f-73891a83ebb5. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1477.154076] env[61962]: DEBUG nova.network.neutron [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Updating instance_info_cache with network_info: [{"id": "166f7db8-1433-4bc9-ab6f-73891a83ebb5", "address": "fa:16:3e:01:4a:73", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap166f7db8-14", "ovs_interfaceid": "166f7db8-1433-4bc9-ab6f-73891a83ebb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.165213] env[61962]: DEBUG oslo_concurrency.lockutils [req-9e2f2d69-01a8-42a7-bc8a-999474c11fb2 req-034f7d8e-4c01-4324-b32b-5ef9e4144193 service nova] Releasing lock "refresh_cache-1035139f-2193-4d1c-a220-369ef6096ae0" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.348761] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892045, 'name': CreateVM_Task, 'duration_secs': 0.346072} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.348946] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1477.349654] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.349895] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.350149] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1477.350422] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd0c42c4-7255-480f-b906-8d2e48de10bb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.356865] env[61962]: DEBUG oslo_vmware.api [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 1477.356865] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52ebf344-f83d-5714-1e50-25c55911c39d" [ 1477.356865] env[61962]: _type = "Task" [ 1477.356865] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.365778] env[61962]: DEBUG oslo_vmware.api [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52ebf344-f83d-5714-1e50-25c55911c39d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.867682] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.867930] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1477.868166] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.159115] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1480.158692] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1482.018551] env[61962]: DEBUG oslo_concurrency.lockutils [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "1035139f-2193-4d1c-a220-369ef6096ae0" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.480282] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "02c198ea-61de-4e70-866c-39781d353d04" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.480647] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "02c198ea-61de-4e70-866c-39781d353d04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.660515] env[61962]: WARNING oslo_vmware.rw_handles [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without
response [ 1522.660515] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1522.660515] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1522.660515] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1522.660515] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1522.660515] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1522.660515] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1522.660515] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1522.660515] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1522.660515] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1522.660515] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1522.660515] env[61962]: ERROR oslo_vmware.rw_handles [ 1522.661338] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/ccd95c83-38c9-4bfe-8c36-14ebfa4b6aca/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1522.662946] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1522.663232] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Copying Virtual Disk [datastore2] vmware_temp/ccd95c83-38c9-4bfe-8c36-14ebfa4b6aca/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/ccd95c83-38c9-4bfe-8c36-14ebfa4b6aca/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1522.663610] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-126dc710-fc37-415c-bb9e-0eeb13ccc561 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.671894] env[61962]: DEBUG oslo_vmware.api [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Waiting for the task: (returnval){ [ 1522.671894] env[61962]: value = "task-4892046" [ 1522.671894] env[61962]: _type = "Task" [ 1522.671894] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.681692] env[61962]: DEBUG oslo_vmware.api [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Task: {'id': task-4892046, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.182430] env[61962]: DEBUG oslo_vmware.exceptions [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1523.182759] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.183288] env[61962]: ERROR nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1523.183288] env[61962]: Faults: ['InvalidArgument'] [ 1523.183288] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Traceback (most recent call last): [ 1523.183288] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1523.183288] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] yield resources [ 1523.183288] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1523.183288] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] self.driver.spawn(context, instance, image_meta, [ 1523.183288] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1523.183288] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1523.183288] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1523.183288] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] self._fetch_image_if_missing(context, vi) [ 1523.183288] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1523.183288] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] image_cache(vi, tmp_image_ds_loc) [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] vm_util.copy_virtual_disk( [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] session._wait_for_task(vmdk_copy_task) [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] return self.wait_for_task(task_ref) [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] return evt.wait() [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] result = hub.switch() [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] return self.greenlet.switch() [ 1523.183731] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1523.184177] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] self.f(*self.args, **self.kw) [ 1523.184177] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1523.184177] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] raise exceptions.translate_fault(task_info.error) [ 1523.184177] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1523.184177] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Faults: ['InvalidArgument'] [ 1523.184177] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] [ 1523.184177] env[61962]: INFO nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Terminating instance [ 1523.185334] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.185535] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1523.185777] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23b23f50-65a6-4c5c-87d5-047426c0d91c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.187997] env[61962]: DEBUG nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1523.188198] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1523.188915] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c06e89-d339-49a3-be28-4abf00ffdd88 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.196331] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1523.196539] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8d58c2c-02ca-4d72-a61f-af569212ec3c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.198939] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1523.199119] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1523.200087] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a857f94-2003-494a-9988-c5d8064bd53f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.205807] env[61962]: DEBUG oslo_vmware.api [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 1523.205807] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5208e363-469f-c086-23fe-082e31883f1d" [ 1523.205807] env[61962]: _type = "Task" [ 1523.205807] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.213984] env[61962]: DEBUG oslo_vmware.api [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5208e363-469f-c086-23fe-082e31883f1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.279734] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1523.280042] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1523.280147] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Deleting the datastore file [datastore2] 8ecd73a7-7f23-4994-85b9-479584831687 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1523.280396] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eab49228-2191-4b98-b464-3b49e97abab9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.288074] env[61962]: DEBUG oslo_vmware.api [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Waiting for the task: (returnval){ [ 1523.288074] env[61962]: value = "task-4892048" [ 1523.288074] env[61962]: _type = "Task" [ 1523.288074] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.299944] env[61962]: DEBUG oslo_vmware.api [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Task: {'id': task-4892048, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.716140] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1523.716522] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating directory with path [datastore2] vmware_temp/d02fc654-1c96-4c2b-9c75-85bace6ca59a/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1523.716663] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33c4efd9-4f3e-4095-8d34-e9fb3adf367c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.730635] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Created directory with path [datastore2] vmware_temp/d02fc654-1c96-4c2b-9c75-85bace6ca59a/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1523.730861] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Fetch image to [datastore2] vmware_temp/d02fc654-1c96-4c2b-9c75-85bace6ca59a/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1523.731048] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/d02fc654-1c96-4c2b-9c75-85bace6ca59a/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1523.731902] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7e29e3-7c4e-4619-9fbe-a69ba5711bed {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.739872] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95dfe0a-4ae2-4034-83ee-0ffad214f89d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.751062] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af93a417-1758-4961-8a7b-755a80a1c8c4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.783645] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7ce42a-06ee-4e02-872d-524237c2c873 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.793117] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5b8930e4-14c9-4e61-bb35-6625118c742f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.805158] env[61962]: DEBUG oslo_vmware.api [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Task: {'id': task-4892048, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075403} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.805404] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1523.805586] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1523.805754] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1523.805925] env[61962]: INFO nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Took 0.62 seconds to destroy the instance on the hypervisor. 
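The CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware idiom: invoke the vSphere task method, then poll the returned task object until it reaches a terminal state, translating a failed task into an exception. The VimFaultException with Faults: ['InvalidArgument'] in the traceback above is raised by exactly this loop (_poll_task in oslo_vmware/api.py:448). A minimal sketch of the pattern, assuming a hypothetical session.get_task_info() helper and a simplified exception class in place of the real suds plumbing:

    import time

    class VimFaultException(Exception):
        # Simplified stand-in for oslo_vmware.exceptions.VimFaultException.
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Poll until the task finishes, mirroring the log above: repeated
        # "progress is N%" DEBUG lines, then either "completed successfully"
        # or a translated fault such as ['InvalidArgument'].
        while True:
            info = session.get_task_info(task_ref)  # hypothetical helper
            if info.state == 'running':
                print("Task: %s progress is %s%%." % (task_ref, info.progress))
                time.sleep(poll_interval)
            elif info.state == 'success':
                print("Task: %s completed successfully." % task_ref)
                return info.result
            else:  # state == 'error'
                raise VimFaultException(info.error.faults, info.error.message)

In the real driver this loop runs inside an oslo looping call on an eventlet greenthread, which is why the tracebacks above pass through loopingcall._inner and hub.switch() before reaching _poll_task.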
[ 1523.808281] env[61962]: DEBUG nova.compute.claims [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1523.808444] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.808655] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.823684] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1523.941249] env[61962]: DEBUG oslo_vmware.rw_handles [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d02fc654-1c96-4c2b-9c75-85bace6ca59a/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1524.005361] env[61962]: DEBUG oslo_vmware.rw_handles [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1524.006029] env[61962]: DEBUG oslo_vmware.rw_handles [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d02fc654-1c96-4c2b-9c75-85bace6ca59a/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1524.169281] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80b085f-2e6e-4593-ab83-c9daac5ca9e2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.177825] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531d8dce-e566-431c-9db1-1ec03aff5315 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.209915] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c890d315-ddf4-41ea-8b05-b443f59ce5c1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.217805] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8940a1c7-73f9-45b3-90a7-3946f4c5f9e3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.231385] env[61962]: DEBUG nova.compute.provider_tree [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1524.241174] env[61962]: DEBUG nova.scheduler.client.report [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1524.256700] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.448s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.257346] env[61962]: ERROR nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1524.257346] env[61962]: Faults: ['InvalidArgument'] [ 1524.257346] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Traceback (most recent call last): [ 1524.257346] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1524.257346] env[61962]: ERROR nova.compute.manager [instance: 
8ecd73a7-7f23-4994-85b9-479584831687] self.driver.spawn(context, instance, image_meta, [ 1524.257346] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1524.257346] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1524.257346] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1524.257346] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] self._fetch_image_if_missing(context, vi) [ 1524.257346] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1524.257346] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] image_cache(vi, tmp_image_ds_loc) [ 1524.257346] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] vm_util.copy_virtual_disk( [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] session._wait_for_task(vmdk_copy_task) [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] return self.wait_for_task(task_ref) [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] return evt.wait() [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] result = hub.switch() [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] return self.greenlet.switch() [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1524.257880] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] self.f(*self.args, **self.kw) [ 1524.258307] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1524.258307] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] raise exceptions.translate_fault(task_info.error) [ 1524.258307] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1524.258307] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Faults: ['InvalidArgument'] [ 1524.258307] env[61962]: ERROR nova.compute.manager [instance: 8ecd73a7-7f23-4994-85b9-479584831687] [ 1524.258307] env[61962]: DEBUG nova.compute.utils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1524.260131] env[61962]: DEBUG nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Build of instance 8ecd73a7-7f23-4994-85b9-479584831687 was re-scheduled: A specified parameter was not correct: fileType [ 1524.260131] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1524.260537] env[61962]: DEBUG nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1524.260719] env[61962]: DEBUG nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1524.260924] env[61962]: DEBUG nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1524.261112] env[61962]: DEBUG nova.network.neutron [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1524.606740] env[61962]: DEBUG nova.network.neutron [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.619710] env[61962]: INFO nova.compute.manager [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Took 0.36 seconds to deallocate network for instance. [ 1524.720518] env[61962]: INFO nova.scheduler.client.report [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Deleted allocations for instance 8ecd73a7-7f23-4994-85b9-479584831687 [ 1524.742806] env[61962]: DEBUG oslo_concurrency.lockutils [None req-78c50ddb-9a5d-49c4-8b81-91657e42e15a tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Lock "8ecd73a7-7f23-4994-85b9-479584831687" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 684.216s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.744142] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Lock "8ecd73a7-7f23-4994-85b9-479584831687" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 487.695s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.744556] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquiring lock "8ecd73a7-7f23-4994-85b9-479584831687-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.744625] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Lock "8ecd73a7-7f23-4994-85b9-479584831687-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.744759] env[61962]: 
DEBUG oslo_concurrency.lockutils [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Lock "8ecd73a7-7f23-4994-85b9-479584831687-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.748255] env[61962]: INFO nova.compute.manager [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Terminating instance [ 1524.750219] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquiring lock "refresh_cache-8ecd73a7-7f23-4994-85b9-479584831687" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1524.750395] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Acquired lock "refresh_cache-8ecd73a7-7f23-4994-85b9-479584831687" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.750576] env[61962]: DEBUG nova.network.neutron [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1524.778906] env[61962]: DEBUG nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1524.785191] env[61962]: DEBUG nova.network.neutron [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1524.843913] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.844180] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.845710] env[61962]: INFO nova.compute.claims [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1524.965643] env[61962]: DEBUG nova.network.neutron [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.976695] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Releasing lock "refresh_cache-8ecd73a7-7f23-4994-85b9-479584831687" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.977191] env[61962]: DEBUG nova.compute.manager [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1524.977475] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1524.978327] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d54a1e4e-80cf-43a1-8994-5ecb31b9cecf {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.988810] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b05a280-1b5b-44fc-84a8-87d6392a51f8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.027609] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8ecd73a7-7f23-4994-85b9-479584831687 could not be found. [ 1525.027829] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1525.028041] env[61962]: INFO nova.compute.manager [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1525.028282] env[61962]: DEBUG oslo.service.loopingcall [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1525.031038] env[61962]: DEBUG nova.compute.manager [-] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1525.031146] env[61962]: DEBUG nova.network.neutron [-] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1525.062044] env[61962]: DEBUG nova.network.neutron [-] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1525.077575] env[61962]: DEBUG nova.network.neutron [-] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.090188] env[61962]: INFO nova.compute.manager [-] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] Took 0.06 seconds to deallocate network for instance. 
[ 1525.203349] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a504f0f-859b-45a7-8776-78c41f63f1d5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.214018] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f27ea12-e773-4e84-8e4a-43f7a8d03e8b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.218827] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b2413e23-7cb0-4588-8f03-0e3d7761bb40 tempest-ServersTestJSON-410785708 tempest-ServersTestJSON-410785708-project-member] Lock "8ecd73a7-7f23-4994-85b9-479584831687" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.475s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.220427] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "8ecd73a7-7f23-4994-85b9-479584831687" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 394.702s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.220427] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 8ecd73a7-7f23-4994-85b9-479584831687] During sync_power_state the instance has a pending task (deleting). Skip. [ 1525.220674] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "8ecd73a7-7f23-4994-85b9-479584831687" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.247905] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3539ad9c-6ba6-43e7-bb67-545b7b878d1d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.256622] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfe037e-6498-4b7e-b8d0-acfcae035d85 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.271153] env[61962]: DEBUG nova.compute.provider_tree [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1525.281710] env[61962]: DEBUG nova.scheduler.client.report [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1525.297353] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.453s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.297882] env[61962]: DEBUG nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1525.333890] env[61962]: DEBUG nova.compute.utils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1525.335491] env[61962]: DEBUG nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1525.335721] env[61962]: DEBUG nova.network.neutron [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1525.345868] env[61962]: DEBUG nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Start building block device mappings for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1525.411740] env[61962]: DEBUG nova.policy [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6cf4009126a142f4bddc43d6f88084ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '88a7b37d54f047149b05fc80e0b51601', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1525.422590] env[61962]: DEBUG nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Start spawning the instance on the hypervisor. {{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1525.450297] env[61962]: DEBUG nova.virt.hardware [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1525.450758] env[61962]: DEBUG nova.virt.hardware [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1525.450957] env[61962]: DEBUG nova.virt.hardware [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1525.451167] env[61962]: DEBUG nova.virt.hardware [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1525.451341] env[61962]: DEBUG nova.virt.hardware [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 
tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1525.451504] env[61962]: DEBUG nova.virt.hardware [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1525.451718] env[61962]: DEBUG nova.virt.hardware [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1525.452085] env[61962]: DEBUG nova.virt.hardware [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1525.452269] env[61962]: DEBUG nova.virt.hardware [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1525.452477] env[61962]: DEBUG nova.virt.hardware [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1525.452605] env[61962]: DEBUG nova.virt.hardware [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1525.453586] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-777e5d38-c848-4b1d-86da-8839a5194bc4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.462603] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f09b97-149e-4cc5-8756-9b6e11dbada7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.751262] env[61962]: DEBUG nova.network.neutron [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Successfully created port: 50058c5d-2695-4aef-8223-20154f9a96f7 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1526.697100] env[61962]: DEBUG nova.compute.manager [req-c70cb3bd-b6ba-4c94-a7fe-9974ea4bb4f3 req-0ff55e27-2758-42ee-9961-d10a3d435285 
service nova] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Received event network-vif-plugged-50058c5d-2695-4aef-8223-20154f9a96f7 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1526.697348] env[61962]: DEBUG oslo_concurrency.lockutils [req-c70cb3bd-b6ba-4c94-a7fe-9974ea4bb4f3 req-0ff55e27-2758-42ee-9961-d10a3d435285 service nova] Acquiring lock "53048279-c3ea-484f-9a3e-049b12892f7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.697564] env[61962]: DEBUG oslo_concurrency.lockutils [req-c70cb3bd-b6ba-4c94-a7fe-9974ea4bb4f3 req-0ff55e27-2758-42ee-9961-d10a3d435285 service nova] Lock "53048279-c3ea-484f-9a3e-049b12892f7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.697737] env[61962]: DEBUG oslo_concurrency.lockutils [req-c70cb3bd-b6ba-4c94-a7fe-9974ea4bb4f3 req-0ff55e27-2758-42ee-9961-d10a3d435285 service nova] Lock "53048279-c3ea-484f-9a3e-049b12892f7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.697902] env[61962]: DEBUG nova.compute.manager [req-c70cb3bd-b6ba-4c94-a7fe-9974ea4bb4f3 req-0ff55e27-2758-42ee-9961-d10a3d435285 service nova] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] No waiting events found dispatching network-vif-plugged-50058c5d-2695-4aef-8223-20154f9a96f7 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1526.699213] env[61962]: WARNING nova.compute.manager [req-c70cb3bd-b6ba-4c94-a7fe-9974ea4bb4f3 req-0ff55e27-2758-42ee-9961-d10a3d435285 service nova] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Received unexpected event network-vif-plugged-50058c5d-2695-4aef-8223-20154f9a96f7 for instance with vm_state building and task_state spawning. 
[ 1526.798030] env[61962]: DEBUG nova.network.neutron [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Successfully updated port: 50058c5d-2695-4aef-8223-20154f9a96f7 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1526.814703] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquiring lock "refresh_cache-53048279-c3ea-484f-9a3e-049b12892f7a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.814772] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquired lock "refresh_cache-53048279-c3ea-484f-9a3e-049b12892f7a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.815033] env[61962]: DEBUG nova.network.neutron [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1526.861823] env[61962]: DEBUG nova.network.neutron [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1527.098212] env[61962]: DEBUG nova.network.neutron [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Updating instance_info_cache with network_info: [{"id": "50058c5d-2695-4aef-8223-20154f9a96f7", "address": "fa:16:3e:e4:66:2e", "network": {"id": "cb750794-20c5-42db-9534-b84f6952057e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1728628786-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "88a7b37d54f047149b05fc80e0b51601", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50058c5d-26", "ovs_interfaceid": "50058c5d-2695-4aef-8223-20154f9a96f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1527.112215] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Releasing lock "refresh_cache-53048279-c3ea-484f-9a3e-049b12892f7a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.112621] env[61962]: DEBUG nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Instance network_info: |[{"id": "50058c5d-2695-4aef-8223-20154f9a96f7", "address": "fa:16:3e:e4:66:2e", "network": {"id": "cb750794-20c5-42db-9534-b84f6952057e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1728628786-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "88a7b37d54f047149b05fc80e0b51601", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50058c5d-26", "ovs_interfaceid": "50058c5d-2695-4aef-8223-20154f9a96f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1527.113447] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:66:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f096917-a0cf-4add-a9d2-23ca1c723b3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50058c5d-2695-4aef-8223-20154f9a96f7', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1527.121506] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Creating folder: Project (88a7b37d54f047149b05fc80e0b51601). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1527.122101] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08257add-d81b-43cb-bba5-dda45203e657 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.133639] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Created folder: Project (88a7b37d54f047149b05fc80e0b51601) in parent group-v953327. [ 1527.133869] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Creating folder: Instances. Parent ref: group-v953411. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1527.134147] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4972d9ea-d30f-4551-b3ab-9ed20160ceba {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.143223] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Created folder: Instances in parent group-v953411. [ 1527.143497] env[61962]: DEBUG oslo.service.loopingcall [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.143722] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1527.143946] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-befb3ea2-c843-4df6-ab4c-5080372f1a17 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.159105] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1527.165826] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1527.165826] env[61962]: value = "task-4892051" [ 1527.165826] env[61962]: _type = "Task" [ 1527.165826] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.171477] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.171700] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.171872] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.172034] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1527.176341] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0e8429-6421-4f9d-91fc-4953e6b3af3a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.178784] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892051, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.184561] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538fd647-10dc-4648-9219-5c1211785045 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.199311] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd9c785-761b-4c8b-9df9-6e95a7c2c76a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.206255] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb825f64-e28a-4d20-9e43-160afc6d04d7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.240492] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180539MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1527.240697] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.240891] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.336340] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1527.336494] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1527.336616] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d970d822-12ab-43cc-8d34-abc6eff8a68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1527.336733] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1527.336848] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1527.336962] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1527.337089] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1527.337204] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3d2749f8-18d2-4631-adc2-093988125eb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1527.337315] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1035139f-2193-4d1c-a220-369ef6096ae0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1527.337424] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53048279-c3ea-484f-9a3e-049b12892f7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1527.350405] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 4de50eaf-ee14-4aee-b5c0-d08dc5fc8010 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1527.364741] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1527.379467] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d9b93c9e-bc68-4d00-bd14-031dd4b61919 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1527.393880] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1527.406643] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1527.418432] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3514756-a861-4662-b859-0881ced78e19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1527.428934] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 02c198ea-61de-4e70-866c-39781d353d04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1527.429197] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1527.429359] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '69', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_e58b68272c404983993a09ad12e8c03b': '2', 'io_workload': '10', 'num_proj_99c377cc8b5545a2b6e712291b14cb04': '1', 'num_proj_696cc1bc78cb4511a76f788d82895fef': '1', 'num_proj_a9c2add945d24a299e5bb0fe543deec3': '1', 'num_proj_e816704de44649099e96127498eed7e9': '1', 'num_proj_eaa1993aea5f4f039f35b32e2321e30f': '1', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_34477c98bf9c4a87971a62cb96b072c7': '1', 'num_task_spawning': '1', 'num_proj_88a7b37d54f047149b05fc80e0b51601': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1527.632181] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e47a4f5-7044-455f-a7b7-e21f8c08bdc6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.640262] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f93fc0-e41c-4efd-912d-5da268d29e21 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.674832] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068879be-aa91-4456-bdf5-6077a81982c7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.685249] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfd93f2-ff3d-4087-ad1b-5c92e98045de {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.688740] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892051, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.698863] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1527.707642] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1527.723980] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1527.724185] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.483s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.181515] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892051, 'name': CreateVM_Task} progress is 99%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.682284] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892051, 'name': CreateVM_Task, 'duration_secs': 1.318569} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.682515] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1528.683254] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.683502] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.683812] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1528.684075] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57145117-ed43-4974-88d7-4fa0bef18212 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.688630] env[61962]: DEBUG oslo_vmware.api [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Waiting for the task: (returnval){ [ 1528.688630] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5203e35c-4d92-24d8-b07a-1a63c786940d" [ 1528.688630] env[61962]: _type = "Task" [ 1528.688630] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.696528] env[61962]: DEBUG oslo_vmware.api [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5203e35c-4d92-24d8-b07a-1a63c786940d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.723883] env[61962]: DEBUG nova.compute.manager [req-641ba685-96a0-484e-96c2-732bca986be5 req-901bf496-c45d-442b-8e38-0829ba3716e9 service nova] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Received event network-changed-50058c5d-2695-4aef-8223-20154f9a96f7 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1528.724082] env[61962]: DEBUG nova.compute.manager [req-641ba685-96a0-484e-96c2-732bca986be5 req-901bf496-c45d-442b-8e38-0829ba3716e9 service nova] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Refreshing instance network info cache due to event network-changed-50058c5d-2695-4aef-8223-20154f9a96f7. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1528.724325] env[61962]: DEBUG oslo_concurrency.lockutils [req-641ba685-96a0-484e-96c2-732bca986be5 req-901bf496-c45d-442b-8e38-0829ba3716e9 service nova] Acquiring lock "refresh_cache-53048279-c3ea-484f-9a3e-049b12892f7a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.724588] env[61962]: DEBUG oslo_concurrency.lockutils [req-641ba685-96a0-484e-96c2-732bca986be5 req-901bf496-c45d-442b-8e38-0829ba3716e9 service nova] Acquired lock "refresh_cache-53048279-c3ea-484f-9a3e-049b12892f7a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.724588] env[61962]: DEBUG nova.network.neutron [req-641ba685-96a0-484e-96c2-732bca986be5 req-901bf496-c45d-442b-8e38-0829ba3716e9 service nova] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Refreshing network info cache for port 50058c5d-2695-4aef-8223-20154f9a96f7 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1529.046327] env[61962]: DEBUG nova.network.neutron [req-641ba685-96a0-484e-96c2-732bca986be5 req-901bf496-c45d-442b-8e38-0829ba3716e9 service nova] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Updated VIF entry in instance network info cache for port 50058c5d-2695-4aef-8223-20154f9a96f7. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1529.046684] env[61962]: DEBUG nova.network.neutron [req-641ba685-96a0-484e-96c2-732bca986be5 req-901bf496-c45d-442b-8e38-0829ba3716e9 service nova] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Updating instance_info_cache with network_info: [{"id": "50058c5d-2695-4aef-8223-20154f9a96f7", "address": "fa:16:3e:e4:66:2e", "network": {"id": "cb750794-20c5-42db-9534-b84f6952057e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1728628786-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "88a7b37d54f047149b05fc80e0b51601", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50058c5d-26", "ovs_interfaceid": "50058c5d-2695-4aef-8223-20154f9a96f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.057333] env[61962]: DEBUG oslo_concurrency.lockutils [req-641ba685-96a0-484e-96c2-732bca986be5 req-901bf496-c45d-442b-8e38-0829ba3716e9 service nova] Releasing lock "refresh_cache-53048279-c3ea-484f-9a3e-049b12892f7a" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.200596] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.200596] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1529.200596] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.723438] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.723592] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1531.154422] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.158230] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1535.158970] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1535.158970] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1535.159455] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1535.191763] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1535.191924] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1535.192065] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1535.192194] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1535.192315] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1535.192432] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1535.192549] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1535.192664] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1535.192781] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1535.192893] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1535.193017] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1537.157766] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.158917] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1540.159253] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1542.158715] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.172702] env[61962]: DEBUG oslo_concurrency.lockutils [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquiring lock "53048279-c3ea-484f-9a3e-049b12892f7a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.725922] env[61962]: WARNING oslo_vmware.rw_handles [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed 
connection without response [ 1571.725922] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1571.725922] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1571.725922] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1571.725922] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1571.725922] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1571.725922] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1571.725922] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1571.725922] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1571.725922] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1571.725922] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1571.725922] env[61962]: ERROR oslo_vmware.rw_handles [ 1571.726809] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/d02fc654-1c96-4c2b-9c75-85bace6ca59a/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1571.728479] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1571.728761] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Copying Virtual Disk [datastore2] vmware_temp/d02fc654-1c96-4c2b-9c75-85bace6ca59a/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/d02fc654-1c96-4c2b-9c75-85bace6ca59a/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1571.729072] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bdc93542-e6ce-483d-bbc1-9ace270ec79a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.737609] env[61962]: DEBUG oslo_vmware.api [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 1571.737609] env[61962]: value = "task-4892052" [ 1571.737609] env[61962]: _type = "Task" [ 1571.737609] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.745891] env[61962]: DEBUG oslo_vmware.api [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4892052, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.248229] env[61962]: DEBUG oslo_vmware.exceptions [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1572.248466] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.249024] env[61962]: ERROR nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1572.249024] env[61962]: Faults: ['InvalidArgument'] [ 1572.249024] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Traceback (most recent call last): [ 1572.249024] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1572.249024] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] yield resources [ 1572.249024] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1572.249024] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] self.driver.spawn(context, instance, image_meta, [ 1572.249024] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1572.249024] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1572.249024] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1572.249024] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] self._fetch_image_if_missing(context, vi) [ 1572.249024] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1572.249024] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] image_cache(vi, tmp_image_ds_loc) [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 
537, in _cache_sparse_image [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] vm_util.copy_virtual_disk( [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] session._wait_for_task(vmdk_copy_task) [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] return self.wait_for_task(task_ref) [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] return evt.wait() [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] result = hub.switch() [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] return self.greenlet.switch() [ 1572.249691] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1572.250592] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] self.f(*self.args, **self.kw) [ 1572.250592] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1572.250592] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] raise exceptions.translate_fault(task_info.error) [ 1572.250592] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1572.250592] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Faults: ['InvalidArgument'] [ 1572.250592] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] [ 1572.250592] env[61962]: INFO nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Terminating instance [ 1572.251181] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.251181] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1572.251671] env[61962]: DEBUG nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1572.251894] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1572.252192] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a769654b-f266-4776-848e-0dc3ad4d4bca {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.254492] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2ee3a0-f0b1-4ad4-a707-5b81ee07c68b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.262427] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1572.262427] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4588067-a53b-4f89-a2b8-960cf904c1ff {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.264363] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1572.264578] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1572.265540] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dec01f2-d7a4-4ad9-b912-2a2241ec3de9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.270479] env[61962]: DEBUG oslo_vmware.api [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Waiting for the task: (returnval){ [ 1572.270479] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5243ef0f-d6b4-3bb7-920f-ed4c88d8e387" [ 1572.270479] env[61962]: _type = "Task" [ 1572.270479] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.278473] env[61962]: DEBUG oslo_vmware.api [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5243ef0f-d6b4-3bb7-920f-ed4c88d8e387, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.326065] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1572.326273] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1572.326367] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleting the datastore file [datastore2] d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1572.326638] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0aa36119-a8b3-4715-b4d6-6a30d4cbcb7e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.333546] env[61962]: DEBUG oslo_vmware.api [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 1572.333546] env[61962]: value = "task-4892054" [ 1572.333546] env[61962]: _type = "Task" [ 1572.333546] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.342889] env[61962]: DEBUG oslo_vmware.api [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4892054, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.780802] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1572.781222] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Creating directory with path [datastore2] vmware_temp/edf7d2d9-9154-4ef2-be57-3a9963ac274f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1572.781315] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d54aeff-b6a2-469f-96af-3020f2173e1f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.792979] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Created directory with path [datastore2] vmware_temp/edf7d2d9-9154-4ef2-be57-3a9963ac274f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1572.793191] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Fetch image to [datastore2] vmware_temp/edf7d2d9-9154-4ef2-be57-3a9963ac274f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1572.793367] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/edf7d2d9-9154-4ef2-be57-3a9963ac274f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1572.794174] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38715d18-cb65-4035-b940-6e7eff1658d7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.801120] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afae7be-47e7-4208-bf5c-f292bad7de14 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.810383] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3144c418-8786-4197-b160-3968f63bf718 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.844623] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b83ff1d-4297-4c17-a1f6-eba78dc469f8 
{{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.852348] env[61962]: DEBUG oslo_vmware.api [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4892054, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09556} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.853919] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1572.854129] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1572.854329] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1572.854507] env[61962]: INFO nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1572.856616] env[61962]: DEBUG nova.compute.claims [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1572.856781] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.856993] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.859535] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-26d2c190-8318-400a-adac-647f6f5764c0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.881318] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1573.008729] env[61962]: DEBUG oslo_vmware.rw_handles [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/edf7d2d9-9154-4ef2-be57-3a9963ac274f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1573.071029] env[61962]: DEBUG oslo_vmware.rw_handles [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1573.071375] env[61962]: DEBUG oslo_vmware.rw_handles [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/edf7d2d9-9154-4ef2-be57-3a9963ac274f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1573.182663] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2a9af4-d0a8-485c-9559-a5be29161043 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.191289] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7b3a83-fb5b-40a8-853b-2f1c21e3c3aa {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.224296] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01074e91-ced5-4c7a-8702-3601926726fe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.232238] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e69f58d-5c2f-4589-8e48-0229afbd8f2d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.245964] env[61962]: DEBUG nova.compute.provider_tree [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.255144] env[61962]: DEBUG nova.scheduler.client.report [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1573.275474] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.416s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.275474] env[61962]: ERROR nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1573.275474] env[61962]: Faults: ['InvalidArgument'] [ 1573.275474] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Traceback (most recent call last): [ 1573.275474] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1573.275474] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] 
self.driver.spawn(context, instance, image_meta, [ 1573.275474] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1573.275474] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1573.275474] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1573.275474] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] self._fetch_image_if_missing(context, vi) [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] image_cache(vi, tmp_image_ds_loc) [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] vm_util.copy_virtual_disk( [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] session._wait_for_task(vmdk_copy_task) [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] return self.wait_for_task(task_ref) [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] return evt.wait() [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] result = hub.switch() [ 1573.276030] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1573.276451] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] return self.greenlet.switch() [ 1573.276451] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1573.276451] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] self.f(*self.args, **self.kw) [ 1573.276451] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1573.276451] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] raise exceptions.translate_fault(task_info.error) [ 1573.276451] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1573.276451] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Faults: ['InvalidArgument'] [ 1573.276451] env[61962]: ERROR nova.compute.manager [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] [ 1573.276451] env[61962]: DEBUG nova.compute.utils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1573.276712] env[61962]: DEBUG nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Build of instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 was re-scheduled: A specified parameter was not correct: fileType [ 1573.276712] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1573.277102] env[61962]: DEBUG nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1573.277284] env[61962]: DEBUG nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1573.277506] env[61962]: DEBUG nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1573.277683] env[61962]: DEBUG nova.network.neutron [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1573.892465] env[61962]: DEBUG nova.network.neutron [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.906427] env[61962]: INFO nova.compute.manager [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Took 0.63 seconds to deallocate network for instance. 
[ 1574.047851] env[61962]: INFO nova.scheduler.client.report [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleted allocations for instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 [ 1574.069454] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b93286ff-68e9-402b-952c-c77dbd100087 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 682.137s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.070711] env[61962]: DEBUG oslo_concurrency.lockutils [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 485.735s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.071202] env[61962]: DEBUG oslo_concurrency.lockutils [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.071202] env[61962]: DEBUG oslo_concurrency.lockutils [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.071368] env[61962]: DEBUG oslo_concurrency.lockutils [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.073573] env[61962]: INFO nova.compute.manager [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Terminating instance [ 1574.075748] env[61962]: DEBUG nova.compute.manager [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1574.075934] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1574.076662] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5fe3616-e2da-4cd4-9761-cc0662f39a14 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.085255] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef626252-6f13-4902-b1d6-3bc437127ecb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.096517] env[61962]: DEBUG nova.compute.manager [None req-120692ab-bf28-47fb-a63c-11dfebf4eb15 tempest-ServerTagsTestJSON-1371681020 tempest-ServerTagsTestJSON-1371681020-project-member] [instance: 4de50eaf-ee14-4aee-b5c0-d08dc5fc8010] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1574.120043] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9 could not be found. [ 1574.120043] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1574.120043] env[61962]: INFO nova.compute.manager [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1574.120309] env[61962]: DEBUG oslo.service.loopingcall [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1574.120545] env[61962]: DEBUG nova.compute.manager [-] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1574.120640] env[61962]: DEBUG nova.network.neutron [-] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1574.123104] env[61962]: DEBUG nova.compute.manager [None req-120692ab-bf28-47fb-a63c-11dfebf4eb15 tempest-ServerTagsTestJSON-1371681020 tempest-ServerTagsTestJSON-1371681020-project-member] [instance: 4de50eaf-ee14-4aee-b5c0-d08dc5fc8010] Instance disappeared before build. 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1574.146309] env[61962]: DEBUG oslo_concurrency.lockutils [None req-120692ab-bf28-47fb-a63c-11dfebf4eb15 tempest-ServerTagsTestJSON-1371681020 tempest-ServerTagsTestJSON-1371681020-project-member] Lock "4de50eaf-ee14-4aee-b5c0-d08dc5fc8010" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 195.757s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.161463] env[61962]: DEBUG nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1574.164621] env[61962]: DEBUG nova.network.neutron [-] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.175338] env[61962]: INFO nova.compute.manager [-] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] Took 0.05 seconds to deallocate network for instance. [ 1574.234249] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.234659] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.236713] env[61962]: INFO nova.compute.claims [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1574.299474] env[61962]: DEBUG oslo_concurrency.lockutils [None req-555ff3a2-44c6-4017-8dd7-6f5fefd753e6 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.229s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.301053] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 443.783s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.301053] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9] During sync_power_state the 
instance has a pending task (deleting). Skip. [ 1574.301298] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "d89abe69-dd71-4cfe-b80b-3b2aeba0b8a9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.551518] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e009f297-a1d5-4e75-a6af-a40f85e5c999 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.559830] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2af52e9-a0d1-4273-8507-48333b083daa {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.592634] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e79587-28eb-4c46-afef-b484614fd616 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.601311] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d50f02-6abb-4047-8de4-1d543be97961 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.615821] env[61962]: DEBUG nova.compute.provider_tree [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.625114] env[61962]: DEBUG nova.scheduler.client.report [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1574.651061] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.416s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.651583] env[61962]: DEBUG nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Start building networks asynchronously for instance.
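The inventory record logged above determines each resource class's effective capacity in placement as (total - reserved) * allocation_ratio: 192 VCPU, 196078 MB of RAM and 200 GB of disk for this node. A quick check of that arithmetic:

```python
# Effective capacity from a placement inventory record:
#   capacity = (total - reserved) * allocation_ratio
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    cap = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {cap:g}")   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200
```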
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1574.693219] env[61962]: DEBUG nova.compute.utils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1574.695583] env[61962]: DEBUG nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1574.695583] env[61962]: DEBUG nova.network.neutron [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1574.706818] env[61962]: DEBUG nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1574.779919] env[61962]: DEBUG nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Start spawning the instance on the hypervisor. 
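The two entries above overlap deliberately: IP allocation runs in the background while block device mappings are built, and the spawn only joins on the network result when it needs it. Nova does this with eventlet greenthreads; a stdlib analogue of the same overlap, with allocate_network() and build_block_devices() as stand-ins:

```python
# Sketch: start network allocation asynchronously, do other build work,
# then join on the allocation result before spawning.
from concurrent.futures import ThreadPoolExecutor
import time

def allocate_network():
    time.sleep(0.1)            # pretend Neutron round-trips happen here
    return [{'port_id': 'example-port'}]

def build_block_devices():
    return ['/dev/sda']        # pretend BDM preparation

with ThreadPoolExecutor(max_workers=1) as pool:
    nw_future = pool.submit(allocate_network)   # "Allocating IP information
                                                #  in the background."
    bdms = build_block_devices()                # proceeds concurrently
    network_info = nw_future.result()           # block only when spawn needs it
print(bdms, network_info)
```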
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1574.803460] env[61962]: DEBUG nova.policy [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94ea520c90c540f0836ee6026603bb07', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40bdd99e47aa40a28edd38c6b4a09a5f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1574.815273] env[61962]: DEBUG nova.virt.hardware [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1574.815853] env[61962]: DEBUG nova.virt.hardware [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1574.815853] env[61962]: DEBUG nova.virt.hardware [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1574.815853] env[61962]: DEBUG nova.virt.hardware [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1574.816048] env[61962]: DEBUG nova.virt.hardware [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1574.816253] env[61962]: DEBUG nova.virt.hardware [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1574.816403] 
env[61962]: DEBUG nova.virt.hardware [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1574.816555] env[61962]: DEBUG nova.virt.hardware [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1574.816725] env[61962]: DEBUG nova.virt.hardware [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1574.816884] env[61962]: DEBUG nova.virt.hardware [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1574.817074] env[61962]: DEBUG nova.virt.hardware [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1574.818224] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec70ddf-065f-48bc-9bf4-9819387cca56 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.827439] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921a55cb-0013-46d4-a0b9-5b9f5c89a51b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.451102] env[61962]: DEBUG nova.network.neutron [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Successfully created port: 41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1576.545699] env[61962]: DEBUG nova.compute.manager [req-3cdcbfe2-6065-428c-9758-cb6adacc0859 req-a3f2a982-2d96-457e-b887-c00d7741f0ae service nova] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Received event network-vif-plugged-41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1576.545986] env[61962]: DEBUG oslo_concurrency.lockutils [req-3cdcbfe2-6065-428c-9758-cb6adacc0859 req-a3f2a982-2d96-457e-b887-c00d7741f0ae service nova] Acquiring lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.546147] env[61962]: DEBUG oslo_concurrency.lockutils
[req-3cdcbfe2-6065-428c-9758-cb6adacc0859 req-a3f2a982-2d96-457e-b887-c00d7741f0ae service nova] Lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1576.546317] env[61962]: DEBUG oslo_concurrency.lockutils [req-3cdcbfe2-6065-428c-9758-cb6adacc0859 req-a3f2a982-2d96-457e-b887-c00d7741f0ae service nova] Lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.546478] env[61962]: DEBUG nova.compute.manager [req-3cdcbfe2-6065-428c-9758-cb6adacc0859 req-a3f2a982-2d96-457e-b887-c00d7741f0ae service nova] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] No waiting events found dispatching network-vif-plugged-41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1576.546634] env[61962]: WARNING nova.compute.manager [req-3cdcbfe2-6065-428c-9758-cb6adacc0859 req-a3f2a982-2d96-457e-b887-c00d7741f0ae service nova] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Received unexpected event network-vif-plugged-41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4 for instance with vm_state building and task_state spawning. [ 1576.693184] env[61962]: DEBUG nova.network.neutron [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Successfully updated port: 41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1576.714539] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquiring lock "refresh_cache-25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.714539] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquired lock "refresh_cache-25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.715532] env[61962]: DEBUG nova.network.neutron [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1576.791982] env[61962]: DEBUG nova.network.neutron [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Instance cache missing network info.
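The nova.virt.hardware lines a few entries above show why one vCPU with no flavor or image limits yields exactly one topology, 1 socket x 1 core x 1 thread. A toy enumeration of sockets*cores*threads factorizations under the logged 65536 maxima, much simplified relative to nova.virt.hardware:

```python
# Enumerate sockets*cores*threads factorizations of a vCPU count, capped by
# per-dimension maxima -- a toy version of the guest CPU topology search.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
print(possible_topologies(4))   # several options, e.g. (1, 2, 2), (2, 2, 1), ...
```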
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1577.023572] env[61962]: DEBUG nova.network.neutron [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Updating instance_info_cache with network_info: [{"id": "41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4", "address": "fa:16:3e:66:c5:78", "network": {"id": "975ee46d-f24a-4a30-9ce3-8eaea3cdd4e5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-878273928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40bdd99e47aa40a28edd38c6b4a09a5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41a6ac18-6f", "ovs_interfaceid": "41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.039429] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Releasing lock "refresh_cache-25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.039742] env[61962]: DEBUG nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Instance network_info: |[{"id": "41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4", "address": "fa:16:3e:66:c5:78", "network": {"id": "975ee46d-f24a-4a30-9ce3-8eaea3cdd4e5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-878273928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40bdd99e47aa40a28edd38c6b4a09a5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41a6ac18-6f", "ovs_interfaceid": "41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1577.040198] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:c5:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3753f451-fa23-4988-9361-074fb0bd3fd4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1577.048517] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Creating folder: Project (40bdd99e47aa40a28edd38c6b4a09a5f). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1577.049182] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f79ebd26-7d95-4a5e-b1b5-557bf8d17380 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.060698] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Created folder: Project (40bdd99e47aa40a28edd38c6b4a09a5f) in parent group-v953327. [ 1577.060934] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Creating folder: Instances. Parent ref: group-v953414. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1577.061207] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71a62840-566a-4974-9f08-33cecee9ce82 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.072322] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Created folder: Instances in parent group-v953414. [ 1577.072640] env[61962]: DEBUG oslo.service.loopingcall [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
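The folder and VM creation calls around this point all follow the same oslo.vmware shape: invoke a vSphere method through the session, get a task managed-object reference back, and poll it until it completes. A minimal sketch against oslo.vmware's public API; the host, credentials and moref values are placeholders, and the VM config is elided, so this only illustrates the call shape:

```python
# Sketch: call a task-returning vSphere method via oslo.vmware and wait on it.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Managed object references; the values here are placeholders.
folder = vim_util.get_moref('group-v12345', 'Folder')
pool = vim_util.get_moref('resgroup-1', 'ResourcePool')

task = session.invoke_api(session.vim, 'CreateVM_Task', folder,
                          config=None, pool=pool)   # real config elided
task_info = session.wait_for_task(task)  # polls; raises on a task fault
print(task_info.state)
```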
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1577.072838] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1577.073109] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c07034a-7b04-4232-98de-c11c50cc2c03 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.094658] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1577.094658] env[61962]: value = "task-4892057" [ 1577.094658] env[61962]: _type = "Task" [ 1577.094658] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.103470] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892057, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.604704] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892057, 'name': CreateVM_Task, 'duration_secs': 0.338727} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.605126] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1577.605641] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.605817] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.606164] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1577.606451] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5db68560-4c8f-49a3-b5df-d4bba93685fe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.611487] env[61962]: DEBUG oslo_vmware.api [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Waiting for the task: (returnval){ [ 1577.611487] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52c564d3-31d8-bff2-7ba4-e4f79283f76d" [ 1577.611487] env[61962]: _type = "Task" [ 1577.611487] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.620015] env[61962]: DEBUG oslo_vmware.api [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52c564d3-31d8-bff2-7ba4-e4f79283f76d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.124198] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.124408] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1578.124615] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.581418] env[61962]: DEBUG nova.compute.manager [req-27a54af1-ec06-4157-9cbf-a7a38ede4bf7 req-6c5a692a-7fad-4699-b4e5-d1bead7d5505 service nova] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Received event network-changed-41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1578.581670] env[61962]: DEBUG nova.compute.manager [req-27a54af1-ec06-4157-9cbf-a7a38ede4bf7 req-6c5a692a-7fad-4699-b4e5-d1bead7d5505 service nova] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Refreshing instance network info cache due to event network-changed-41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4. 
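The "[datastore2] devstack-image-cache_base/..." locks above implement a check-then-fetch image cache: the lock name embeds the datastore path of the cached VMDK, so only operations on the same image serialize while different images proceed in parallel. A small analogue with oslo.concurrency; the cache directory and fetch logic are stand-ins:

```python
# Sketch: serialize cache fills per item by embedding the item's path in the
# lock name, as the datastore image-cache locks above do.
import os
from oslo_concurrency import lockutils

CACHE_DIR = '/tmp/image-cache'

def fetch(image_id, dest):
    with open(dest, 'wb') as f:        # stand-in for streaming from Glance
        f.write(b'image-bytes')

def cached_image_path(image_id):
    path = os.path.join(CACHE_DIR, image_id + '.vmdk')
    with lockutils.lock(f'[datastore2] image-cache/{image_id}'):
        if not os.path.exists(path):   # re-check under the lock
            os.makedirs(CACHE_DIR, exist_ok=True)
            fetch(image_id, path)
    return path

print(cached_image_path('f684bb17'))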
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1578.581910] env[61962]: DEBUG oslo_concurrency.lockutils [req-27a54af1-ec06-4157-9cbf-a7a38ede4bf7 req-6c5a692a-7fad-4699-b4e5-d1bead7d5505 service nova] Acquiring lock "refresh_cache-25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.582067] env[61962]: DEBUG oslo_concurrency.lockutils [req-27a54af1-ec06-4157-9cbf-a7a38ede4bf7 req-6c5a692a-7fad-4699-b4e5-d1bead7d5505 service nova] Acquired lock "refresh_cache-25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.582236] env[61962]: DEBUG nova.network.neutron [req-27a54af1-ec06-4157-9cbf-a7a38ede4bf7 req-6c5a692a-7fad-4699-b4e5-d1bead7d5505 service nova] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Refreshing network info cache for port 41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1578.909780] env[61962]: DEBUG nova.network.neutron [req-27a54af1-ec06-4157-9cbf-a7a38ede4bf7 req-6c5a692a-7fad-4699-b4e5-d1bead7d5505 service nova] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Updated VIF entry in instance network info cache for port 41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1578.910218] env[61962]: DEBUG nova.network.neutron [req-27a54af1-ec06-4157-9cbf-a7a38ede4bf7 req-6c5a692a-7fad-4699-b4e5-d1bead7d5505 service nova] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Updating instance_info_cache with network_info: [{"id": "41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4", "address": "fa:16:3e:66:c5:78", "network": {"id": "975ee46d-f24a-4a30-9ce3-8eaea3cdd4e5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-878273928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40bdd99e47aa40a28edd38c6b4a09a5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41a6ac18-6f", "ovs_interfaceid": "41a6ac18-6f2b-4413-a1ae-cbdce9b3fec4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.921152] env[61962]: DEBUG oslo_concurrency.lockutils [req-27a54af1-ec06-4157-9cbf-a7a38ede4bf7 req-6c5a692a-7fad-4699-b4e5-d1bead7d5505 service nova] Releasing lock "refresh_cache-25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.158065] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1587.169468] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.169685] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.169844] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.170014] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1587.171521] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156d755b-837f-407f-a660-c42241e565ba {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.180520] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db43f240-0f29-4343-af65-22bc257128d3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.195643] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a31b06-9720-4c3f-bcd9-0e89fd63b4fa {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.202757] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fb4c8d-5512-471c-bb82-62b47e5188f6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.232444] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180529MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1587.232619] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.232788] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.313122] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1587.313545] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d970d822-12ab-43cc-8d34-abc6eff8a68d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1587.313545] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1587.313545] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1587.313768] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1587.313768] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1587.313868] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3d2749f8-18d2-4631-adc2-093988125eb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1587.314102] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1035139f-2193-4d1c-a220-369ef6096ae0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
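Every resource-tracker mutation above runs under the single "compute_resources" lock, and lockutils logs how long each caller waited for and held it. The decorator form of that pattern produces exactly these log lines; the tracker-like class here is illustrative, not Nova's:

```python
# Sketch: oslo.concurrency's synchronized decorator, the source of the
# 'Lock "compute_resources" acquired by ... :: waited Ns' entries.
from oslo_concurrency import lockutils

class Tracker:
    @lockutils.synchronized('compute_resources')
    def instance_claim(self, instance):
        # all inventory bookkeeping happens with the lock held
        return {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}

print(Tracker().instance_claim({'uuid': 'example'}))
```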
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1587.314200] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53048279-c3ea-484f-9a3e-049b12892f7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1587.314295] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1587.326563] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance d9b93c9e-bc68-4d00-bd14-031dd4b61919 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1587.338738] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1587.349474] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1587.362330] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3514756-a861-4662-b859-0881ced78e19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1587.375257] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 02c198ea-61de-4e70-866c-39781d353d04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1587.375512] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1587.375676] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '71', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_99c377cc8b5545a2b6e712291b14cb04': '1', 'io_workload': '10', 'num_proj_696cc1bc78cb4511a76f788d82895fef': '1', 'num_proj_a9c2add945d24a299e5bb0fe543deec3': '1', 'num_proj_e816704de44649099e96127498eed7e9': '1', 'num_proj_eaa1993aea5f4f039f35b32e2321e30f': '1', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_34477c98bf9c4a87971a62cb96b072c7': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_88a7b37d54f047149b05fc80e0b51601': '1', 'num_task_spawning': '1', 'num_proj_40bdd99e47aa40a28edd38c6b4a09a5f': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1587.567438] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c551becf-2279-422d-aee8-ff3025b9c682 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.575753] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1438b71-fef3-4ea4-9414-0ba732ff7be5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.606411] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca95436f-b187-47dc-bf64-de56ea21038b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.614603] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11cc560-f9e0-451b-80a5-827444075154 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.629220] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1587.640621] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1587.657902] env[61962]: DEBUG 
nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1587.658104] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.425s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.660185] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.660517] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1593.153946] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1594.158589] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.154700] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1597.158009] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1597.158396] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1597.158396] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1597.178774] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1597.178933] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1597.179047] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1597.179177] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1597.179298] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1597.179421] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1597.179540] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1597.179660] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1597.179802] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1597.179908] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1597.180046] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
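The "Final resource view" recorded in the audit a few entries above is internally consistent with ten tracked instances of m1.nano size (1 vCPU, 128 MB, 1 GB) plus the 512 MB host-memory reservation. A quick reconstruction of those numbers:

```python
# Reconstruct used_ram/used_disk/used_vcpus from the resource audit above.
instances = 10          # num_instances: '10' in the stats
flavor = {'vcpus': 1, 'memory_mb': 128, 'root_gb': 1}
reserved_mb = 512       # reserved host memory from the inventory record

used_ram = reserved_mb + instances * flavor['memory_mb']
used_disk = instances * flavor['root_gb']
used_vcpus = instances * flavor['vcpus']
print(used_ram, used_disk, used_vcpus)   # 1792 10 10 -- matches the log
```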
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1597.180512] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1600.159304] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1601.160266] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1603.158191] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.465568] env[61962]: DEBUG oslo_concurrency.lockutils [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquiring lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.972958] env[61962]: WARNING oslo_vmware.rw_handles [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1621.972958] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1621.972958] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1621.972958] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1621.972958] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1621.972958] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1621.972958] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1621.972958] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1621.972958] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1621.972958] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1621.972958] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1621.972958] env[61962]: ERROR oslo_vmware.rw_handles [ 1621.973747] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Downloaded image file data
f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/edf7d2d9-9154-4ef2-be57-3a9963ac274f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1621.975431] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1621.975706] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Copying Virtual Disk [datastore2] vmware_temp/edf7d2d9-9154-4ef2-be57-3a9963ac274f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/edf7d2d9-9154-4ef2-be57-3a9963ac274f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1621.976020] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07212804-f688-4d46-8ae8-39a5b7b1ac1f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.985369] env[61962]: DEBUG oslo_vmware.api [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Waiting for the task: (returnval){ [ 1621.985369] env[61962]: value = "task-4892058" [ 1621.985369] env[61962]: _type = "Task" [ 1621.985369] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.998190] env[61962]: DEBUG oslo_vmware.api [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Task: {'id': task-4892058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.497465] env[61962]: DEBUG oslo_vmware.exceptions [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Fault InvalidArgument not matched. 
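"Fault InvalidArgument not matched" above means oslo.vmware found no specific exception class registered for that fault name, so it falls back to a generic VimFaultException carrying the fault list, and callers key off the fault names. A sketch of caller-side handling; spawn() is a stand-in for the vmops call chain seen in the traceback that follows:

```python
# Sketch: handle an oslo.vmware task failure by inspecting the fault list
# carried by the generic VimFaultException.
from oslo_vmware import exceptions as vexc

def spawn():
    # Stand-in: simulate the copy-disk failure reported below.
    raise vexc.VimFaultException(
        ['InvalidArgument'],
        'A specified parameter was not correct: fileType')

try:
    spawn()
except vexc.VimFaultException as e:
    if 'InvalidArgument' in e.fault_list:
        print('bad request to vSphere:', str(e))  # terminate/clean up instance
    else:
        raise
```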
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1622.497691] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.498250] env[61962]: ERROR nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1622.498250] env[61962]: Faults: ['InvalidArgument'] [ 1622.498250] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Traceback (most recent call last): [ 1622.498250] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1622.498250] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] yield resources [ 1622.498250] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1622.498250] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] self.driver.spawn(context, instance, image_meta, [ 1622.498250] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1622.498250] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1622.498250] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1622.498250] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] self._fetch_image_if_missing(context, vi) [ 1622.498250] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] image_cache(vi, tmp_image_ds_loc) [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] vm_util.copy_virtual_disk( [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] session._wait_for_task(vmdk_copy_task) [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] return self.wait_for_task(task_ref) [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] return evt.wait() [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] result = hub.switch() [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1622.498724] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] return self.greenlet.switch() [ 1622.499135] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1622.499135] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] self.f(*self.args, **self.kw) [ 1622.499135] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1622.499135] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] raise exceptions.translate_fault(task_info.error) [ 1622.499135] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1622.499135] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Faults: ['InvalidArgument'] [ 1622.499135] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] [ 1622.499135] env[61962]: INFO nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Terminating instance [ 1622.500824] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.500824] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1622.500824] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b1b3631-e4a3-4b95-bb4f-890b8dd794b0 {{(pid=61962) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.503128] env[61962]: DEBUG nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1622.503326] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1622.504050] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490974b0-f558-4945-b168-ebc11221bb3c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.511683] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1622.511925] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77b252aa-ea05-4114-9c87-0e0ab431a015 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.514185] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1622.514380] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1622.515452] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8696ee3-a105-41d6-9ced-b9dc22d9b97e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.520637] env[61962]: DEBUG oslo_vmware.api [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Waiting for the task: (returnval){ [ 1622.520637] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52a67786-2772-f7a1-9423-18134f82bfa6" [ 1622.520637] env[61962]: _type = "Task" [ 1622.520637] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.528351] env[61962]: DEBUG oslo_vmware.api [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52a67786-2772-f7a1-9423-18134f82bfa6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.592711] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1622.592936] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1622.593137] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Deleting the datastore file [datastore2] 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1622.593417] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ec48306-c861-4200-b0bd-871276d342fb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.600551] env[61962]: DEBUG oslo_vmware.api [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Waiting for the task: (returnval){ [ 1622.600551] env[61962]: value = "task-4892060" [ 1622.600551] env[61962]: _type = "Task" [ 1622.600551] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.608886] env[61962]: DEBUG oslo_vmware.api [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Task: {'id': task-4892060, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.033793] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1623.034188] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Creating directory with path [datastore2] vmware_temp/840d3151-6d0f-4449-98ec-ba3644ea6c87/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1623.034339] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43b06c0d-d454-400c-bc3b-9857891362cc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.048262] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Created directory with path [datastore2] vmware_temp/840d3151-6d0f-4449-98ec-ba3644ea6c87/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1623.048448] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Fetch image to [datastore2] vmware_temp/840d3151-6d0f-4449-98ec-ba3644ea6c87/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1623.048617] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/840d3151-6d0f-4449-98ec-ba3644ea6c87/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1623.049406] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe69147-03f2-4c6f-83fb-04576b392e27 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.056844] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e027ac5d-35e8-44fa-9775-c8f3abe8557b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.067034] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25aeaf1d-dbaa-4f84-9349-3fb91f3dfced {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.098718] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2052e997-a703-4861-955f-be78d5ea6e72 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.109066] env[61962]: DEBUG oslo_vmware.api [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Task: {'id': task-4892060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068604} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.110626] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1623.110848] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1623.111048] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1623.111232] env[61962]: INFO nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Took 0.61 seconds to destroy the instance on the hypervisor. 
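The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all go through the same poll-until-terminal loop ("Task: {...} progress is 0%." repeated until the task reports success or a fault such as InvalidArgument, which is then raised to the caller). Below is a minimal, self-contained sketch of that pattern; Task, VimFault and wait_for_task here are hypothetical stand-ins written for illustration, not the oslo.vmware API.

import time
from dataclasses import dataclass


class VimFault(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, msg, fault_list):
        super().__init__(msg)
        self.fault_list = fault_list


@dataclass
class Task:
    """Hypothetical task handle that a vSphere-like backend updates."""

    task_id: str
    name: str
    state: str = "running"   # "running" | "success" | "error"
    progress: int = 0
    error: VimFault | None = None


def wait_for_task(task, refresh, interval=0.5, timeout=60.0):
    """Poll `task` until it reaches a terminal state, raising its fault on error."""
    deadline = time.monotonic() + timeout
    while True:
        refresh(task)  # backend refresh hook (RetrievePropertiesEx in vSphere)
        print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}}} "
              f"progress is {task.progress}%.")
        if task.state == "success":
            return task
        if task.state == "error":
            # counterpart of `raise exceptions.translate_fault(task_info.error)`
            raise task.error or VimFault("task failed", [])
        if time.monotonic() > deadline:
            raise TimeoutError(f"task {task.task_id} did not complete")
        time.sleep(interval)


# Reproduce the failure mode logged above: the disk-copy task comes back
# with an InvalidArgument fault on its first refresh.
def backend(task):
    task.state = "error"
    task.error = VimFault("A specified parameter was not correct: fileType",
                          ["InvalidArgument"])


try:
    wait_for_task(Task("task-4892058", "CopyVirtualDisk_Task"), backend)
except VimFault as fault:
    print(f"Faults: {fault.fault_list}")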
[ 1623.113495] env[61962]: DEBUG nova.compute.claims [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1623.113743] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.114130] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.117516] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e9e5cda6-e9c5-48dc-a4cf-185bab71081d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.145253] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1623.359620] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94955fca-89c3-4ec1-8497-8bd898e5f381 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.367496] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9826b8-4d0a-4a3d-8ffa-9604f2351dc8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.400207] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.401764] env[61962]: ERROR nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe. 
[ 1623.401764] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Traceback (most recent call last): [ 1623.401764] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1623.401764] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1623.401764] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1623.401764] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] result = getattr(controller, method)(*args, **kwargs) [ 1623.401764] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1623.401764] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self._get(image_id) [ 1623.401764] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1623.401764] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1623.401764] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] resp, body = self.http_client.get(url, headers=header) [ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self.request(url, 'GET', **kwargs) [ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self._handle_response(resp) [ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] raise exc.from_response(resp, resp.content) [ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] [ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] During handling of the above exception, another exception occurred: [ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] [ 1623.402180] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Traceback (most recent call last): [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] yield resources [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self.driver.spawn(context, instance, image_meta, [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self._fetch_image_if_missing(context, vi) [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] image_fetch(context, vi, tmp_image_ds_loc) [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] images.fetch_image( [ 1623.402544] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] metadata = IMAGE_API.get(context, image_ref) [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return session.show(context, image_id, [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] _reraise_translated_image_exception(image_id) [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] raise new_exc.with_traceback(exc_trace) [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] result = getattr(controller, method)(*args, **kwargs) [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1623.402935] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self._get(image_id) [ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] resp, body = self.http_client.get(url, headers=header) [ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self.request(url, 'GET', **kwargs) [ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self._handle_response(resp) [ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] raise exc.from_response(resp, resp.content) [ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] nova.exception.ImageNotAuthorized: Not authorized for image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe. 
[ 1623.403347] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] [ 1623.403649] env[61962]: INFO nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Terminating instance [ 1623.404137] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb32a57-6bde-4538-8f26-9c21582f71e4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.406733] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.406941] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1623.407448] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquiring lock "refresh_cache-d970d822-12ab-43cc-8d34-abc6eff8a68d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.407603] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquired lock "refresh_cache-d970d822-12ab-43cc-8d34-abc6eff8a68d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.407765] env[61962]: DEBUG nova.network.neutron [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1623.408683] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2db3a437-82ed-43d7-a6a3-c3e1043cb12e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.417428] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5292c37-a328-45e7-8958-7ef4ed18d125 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.423942] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1623.424131] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 
tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1623.426133] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d05adc9d-b86e-4b28-b956-f0f6f3726d13 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.436736] env[61962]: DEBUG nova.compute.provider_tree [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1623.439029] env[61962]: DEBUG oslo_vmware.api [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Waiting for the task: (returnval){ [ 1623.439029] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]521e361a-6dc5-13a0-bf2f-30c2a33c486a" [ 1623.439029] env[61962]: _type = "Task" [ 1623.439029] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.443023] env[61962]: DEBUG nova.network.neutron [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1623.446389] env[61962]: DEBUG nova.scheduler.client.report [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1623.453277] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1623.453536] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Creating directory with path [datastore2] vmware_temp/ed91b0c2-226a-4b49-8794-2958a9d7ebab/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1623.453800] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bfc59c7-4b60-4666-a04c-ce5d48d8eafa {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.466023] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.350s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.466023] env[61962]: ERROR nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1623.466023] env[61962]: Faults: ['InvalidArgument'] [ 1623.466023] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Traceback (most recent call last): [ 1623.466023] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1623.466023] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] self.driver.spawn(context, instance, image_meta, [ 1623.466023] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1623.466023] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1623.466023] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1623.466023] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] self._fetch_image_if_missing(context, vi) [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] image_cache(vi, tmp_image_ds_loc) [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] vm_util.copy_virtual_disk( [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] session._wait_for_task(vmdk_copy_task) [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] return self.wait_for_task(task_ref) [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] return evt.wait() [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] result = hub.switch() [ 1623.466482] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1623.466868] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] return self.greenlet.switch() [ 1623.466868] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1623.466868] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] self.f(*self.args, **self.kw) [ 1623.466868] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1623.466868] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] raise exceptions.translate_fault(task_info.error) [ 1623.466868] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1623.466868] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Faults: ['InvalidArgument'] [ 1623.466868] env[61962]: ERROR nova.compute.manager [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] [ 1623.466868] env[61962]: DEBUG nova.compute.utils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1623.467111] env[61962]: DEBUG nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Build of instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 was re-scheduled: A specified parameter was not correct: fileType [ 1623.467111] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1623.467359] env[61962]: DEBUG nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1623.467533] env[61962]: DEBUG nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1623.467703] env[61962]: DEBUG nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1623.467873] env[61962]: DEBUG nova.network.neutron [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1623.476956] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Created directory with path [datastore2] vmware_temp/ed91b0c2-226a-4b49-8794-2958a9d7ebab/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1623.477186] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Fetch image to [datastore2] vmware_temp/ed91b0c2-226a-4b49-8794-2958a9d7ebab/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1623.477415] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/ed91b0c2-226a-4b49-8794-2958a9d7ebab/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1623.478204] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04949e9-0ae9-4450-85a8-62f022af5534 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.486528] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efab224-d7bb-4cbf-9832-de12b87439a5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.497731] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30729b8e-8987-4572-85e0-6ce06d3e9ef3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.533944] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88ad196-01cc-4681-a20f-040bb2466c18 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.539724] env[61962]: DEBUG nova.network.neutron [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: 
d970d822-12ab-43cc-8d34-abc6eff8a68d] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.542749] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-36f41e06-7eac-4457-bfd3-40042315208d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.548861] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Releasing lock "refresh_cache-d970d822-12ab-43cc-8d34-abc6eff8a68d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.549274] env[61962]: DEBUG nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1623.549457] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1623.550523] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2635fb-6421-4eed-845a-2513b8ac4ca6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.560358] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1623.560673] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1824d457-ab24-4621-987d-1883e446c3ec {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.567283] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1623.621923] env[61962]: DEBUG oslo_vmware.rw_handles [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ed91b0c2-226a-4b49-8794-2958a9d7ebab/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1623.681677] env[61962]: DEBUG oslo_vmware.rw_handles [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1623.681891] env[61962]: DEBUG oslo_vmware.rw_handles [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ed91b0c2-226a-4b49-8794-2958a9d7ebab/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1623.884670] env[61962]: DEBUG nova.network.neutron [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.913661] env[61962]: INFO nova.compute.manager [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Took 0.45 seconds to deallocate network for instance. [ 1624.044346] env[61962]: INFO nova.scheduler.client.report [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Deleted allocations for instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 [ 1624.066876] env[61962]: DEBUG oslo_concurrency.lockutils [None req-7a230fc5-6af3-471e-9f4a-043813914279 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 676.482s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.068040] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 480.536s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.068268] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Acquiring lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.068492] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 
tempest-ServerActionsTestJSON-2003961310-project-member] Lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.068674] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.071840] env[61962]: INFO nova.compute.manager [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Terminating instance [ 1624.073600] env[61962]: DEBUG nova.compute.manager [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1624.073791] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1624.074309] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a86455a8-68d9-4c53-9355-807e773b10fb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.079963] env[61962]: DEBUG nova.compute.manager [None req-86bf0892-b0db-4db9-87c8-1ce8e652eaff tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] [instance: d9b93c9e-bc68-4d00-bd14-031dd4b61919] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1624.106192] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1624.106440] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1624.106659] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Deleting the datastore file [datastore2] d970d822-12ab-43cc-8d34-abc6eff8a68d {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1624.107776] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aaeba74b-0654-49c5-ba91-8f7c00921b44 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.110191] env[61962]: DEBUG nova.compute.manager [None req-86bf0892-b0db-4db9-87c8-1ce8e652eaff tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] [instance: d9b93c9e-bc68-4d00-bd14-031dd4b61919] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1624.115943] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23157773-a540-4861-a3c7-f6bd19076130 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.128457] env[61962]: DEBUG oslo_vmware.api [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Waiting for the task: (returnval){ [ 1624.128457] env[61962]: value = "task-4892062" [ 1624.128457] env[61962]: _type = "Task" [ 1624.128457] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.140450] env[61962]: DEBUG oslo_vmware.api [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Task: {'id': task-4892062, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.153765] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5572a1d3-5d2f-4af5-b9f6-ec57506e2069 could not be found. 
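Annotation: the "Waiting for the task ... to complete" and "_poll_task ... progress is 0%" lines above reflect oslo_vmware's poll-until-done handling of vCenter tasks such as DeleteDatastoreFile_Task. Below is a minimal sketch of that polling pattern, not the library's actual code; get_task_info() and TaskFailed are hypothetical stand-ins for the real oslo_vmware.api internals.

    import time

    class TaskFailed(Exception):
        """Raised when the vCenter task ends in an error state (illustrative)."""

    def wait_for_task(get_task_info, interval=0.5):
        # Poll the task's state until vCenter reports success or error.
        while True:
            info = get_task_info()  # assumed: object with .state/.progress/.error
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise TaskFailed(info.error)
            # 'queued' or 'running': report progress, sleep, and poll again,
            # which is what produces the repeated "progress is N%" log lines.
            print('Task progress is %s%%' % (info.progress or 0))
            time.sleep(interval)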
[ 1624.153946] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1624.154147] env[61962]: INFO nova.compute.manager [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1624.154465] env[61962]: DEBUG oslo.service.loopingcall [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1624.155757] env[61962]: DEBUG oslo_concurrency.lockutils [None req-86bf0892-b0db-4db9-87c8-1ce8e652eaff tempest-ServerDiskConfigTestJSON-1190480345 tempest-ServerDiskConfigTestJSON-1190480345-project-member] Lock "d9b93c9e-bc68-4d00-bd14-031dd4b61919" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.847s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.155977] env[61962]: DEBUG nova.compute.manager [-] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1624.156097] env[61962]: DEBUG nova.network.neutron [-] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1624.175438] env[61962]: DEBUG nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1624.186700] env[61962]: DEBUG nova.network.neutron [-] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.199170] env[61962]: INFO nova.compute.manager [-] [instance: 5572a1d3-5d2f-4af5-b9f6-ec57506e2069] Took 0.04 seconds to deallocate network for instance. 
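Annotation: the WARNING "Instance does not exist on backend" followed immediately by "Instance destroyed" and network deallocation above shows the delete-must-succeed idiom: if the backing VM is already gone, destroy is treated as complete and cleanup continues so Neutron ports do not leak. A sketch of that idiom under assumed names (InstanceNotFound, backend, network_api are placeholders, not the real Nova plumbing):

    class InstanceNotFound(Exception):
        pass

    def destroy_instance(backend, network_api, instance_uuid):
        try:
            backend.destroy(instance_uuid)
        except InstanceNotFound:
            # Already absent on the hypervisor; deletion is idempotent,
            # so log a warning and carry on rather than failing the delete.
            print('Instance %s does not exist on backend' % instance_uuid)
        # Network cleanup still runs regardless of hypervisor state.
        network_api.deallocate_for_instance(instance_uuid)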
[ 1624.240268] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.240529] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.242034] env[61962]: INFO nova.compute.claims [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1624.298150] env[61962]: DEBUG oslo_concurrency.lockutils [None req-fc105c91-72eb-43da-8701-e8664cbbaa11 tempest-ServerActionsTestJSON-2003961310 tempest-ServerActionsTestJSON-2003961310-project-member] Lock "5572a1d3-5d2f-4af5-b9f6-ec57506e2069" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.230s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.473648] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787d90a0-b766-4933-a21b-25615fdbc8f5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.481522] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3374711c-9371-42b1-a644-e00be1b816a8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.513601] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b3892a-2424-46c3-92f3-a47af1d9ddb2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.522203] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82679c16-569c-4260-a1c1-528a6b547ada {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.537497] env[61962]: DEBUG nova.compute.provider_tree [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1624.569946] env[61962]: DEBUG nova.scheduler.client.report [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1624.583706] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.584213] env[61962]: DEBUG nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1624.617536] env[61962]: DEBUG nova.compute.utils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1624.618790] env[61962]: DEBUG nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1624.618961] env[61962]: DEBUG nova.network.neutron [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1624.628554] env[61962]: DEBUG nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1624.640833] env[61962]: DEBUG oslo_vmware.api [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Task: {'id': task-4892062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039483} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.640833] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1624.641147] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1624.641147] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1624.641218] env[61962]: INFO nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1624.641441] env[61962]: DEBUG oslo.service.loopingcall [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1624.641644] env[61962]: DEBUG nova.compute.manager [-] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1624.644476] env[61962]: DEBUG nova.compute.claims [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1624.644678] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.644895] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.677256] env[61962]: DEBUG nova.policy [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68caadc6bfc440da9921d99309e9dce0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56bb35c701074f02820d93ad5911d089', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1624.692877] env[61962]: DEBUG nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1624.720096] env[61962]: DEBUG nova.virt.hardware [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1624.720342] env[61962]: DEBUG nova.virt.hardware [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1624.720499] env[61962]: DEBUG nova.virt.hardware [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1624.720686] env[61962]: DEBUG nova.virt.hardware [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1624.720861] env[61962]: DEBUG nova.virt.hardware [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1624.721018] env[61962]: DEBUG nova.virt.hardware [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1624.721235] env[61962]: DEBUG nova.virt.hardware [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1624.721396] env[61962]: DEBUG nova.virt.hardware [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1624.721562] 
env[61962]: DEBUG nova.virt.hardware [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1624.721854] env[61962]: DEBUG nova.virt.hardware [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1624.721923] env[61962]: DEBUG nova.virt.hardware [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1624.722871] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ccf9508-cedd-425a-b48a-45e107e7cdb2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.733818] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36114328-9026-4aa3-aa9b-462764aea7e3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.876475] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278625e6-32bc-4fdb-848f-59a06da6a3b7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.884651] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123610c2-68d8-4c77-b688-bea41a16a18c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.915303] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c8843e-830b-403d-b63a-613c529bbf6b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.923205] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a337242-d82b-4f52-9f3f-f08f122a3bba {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.939165] env[61962]: DEBUG nova.compute.provider_tree [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1624.948758] env[61962]: DEBUG nova.scheduler.client.report [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1624.976773] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.332s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.977619] env[61962]: ERROR nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe. [ 1624.977619] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Traceback (most recent call last): [ 1624.977619] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1624.977619] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1624.977619] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1624.977619] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] result = getattr(controller, method)(*args, **kwargs) [ 1624.977619] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1624.977619] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self._get(image_id) [ 1624.977619] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1624.977619] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1624.977619] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] resp, body = self.http_client.get(url, headers=header) [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self.request(url, 'GET', **kwargs) [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return 
self._handle_response(resp) [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] raise exc.from_response(resp, resp.content) [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] During handling of the above exception, another exception occurred: [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] [ 1624.977998] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Traceback (most recent call last): [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self.driver.spawn(context, instance, image_meta, [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self._fetch_image_if_missing(context, vi) [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] image_fetch(context, vi, tmp_image_ds_loc) [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] images.fetch_image( [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] metadata = IMAGE_API.get(context, image_ref) [ 1624.978336] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: 
d970d822-12ab-43cc-8d34-abc6eff8a68d] return session.show(context, image_id, [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] _reraise_translated_image_exception(image_id) [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] raise new_exc.with_traceback(exc_trace) [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] result = getattr(controller, method)(*args, **kwargs) [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self._get(image_id) [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1624.978683] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1624.979061] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1624.979061] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] resp, body = self.http_client.get(url, headers=header) [ 1624.979061] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1624.979061] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self.request(url, 'GET', **kwargs) [ 1624.979061] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1624.979061] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self._handle_response(resp) [ 1624.979061] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1624.979061] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] raise exc.from_response(resp, resp.content) [ 1624.979061] env[61962]: ERROR 
nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] nova.exception.ImageNotAuthorized: Not authorized for image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe. [ 1624.979061] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] [ 1624.979061] env[61962]: DEBUG nova.compute.utils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Not authorized for image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe. {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1624.979950] env[61962]: DEBUG nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Build of instance d970d822-12ab-43cc-8d34-abc6eff8a68d was re-scheduled: Not authorized for image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1624.980403] env[61962]: DEBUG nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1624.980624] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquiring lock "refresh_cache-d970d822-12ab-43cc-8d34-abc6eff8a68d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.980803] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquired lock "refresh_cache-d970d822-12ab-43cc-8d34-abc6eff8a68d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.980970] env[61962]: DEBUG nova.network.neutron [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1625.008372] env[61962]: DEBUG nova.network.neutron [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1625.096639] env[61962]: DEBUG nova.network.neutron [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.110580] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Releasing lock "refresh_cache-d970d822-12ab-43cc-8d34-abc6eff8a68d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.110814] env[61962]: DEBUG nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1625.111115] env[61962]: DEBUG nova.compute.manager [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Skipping network deallocation for instance since networking was not requested. {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1625.114478] env[61962]: DEBUG nova.network.neutron [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Successfully created port: 28222adf-fbb7-4199-a9e2-1f3e4d389e4d {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1625.231321] env[61962]: INFO nova.scheduler.client.report [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Deleted allocations for instance d970d822-12ab-43cc-8d34-abc6eff8a68d [ 1625.251558] env[61962]: DEBUG oslo_concurrency.lockutils [None req-315218da-fc87-4c93-928d-220a133db47c tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Lock "d970d822-12ab-43cc-8d34-abc6eff8a68d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 625.861s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.252711] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Lock "d970d822-12ab-43cc-8d34-abc6eff8a68d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 429.841s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.252926] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquiring lock "d970d822-12ab-43cc-8d34-abc6eff8a68d-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.253143] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Lock "d970d822-12ab-43cc-8d34-abc6eff8a68d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.253309] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Lock "d970d822-12ab-43cc-8d34-abc6eff8a68d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.256861] env[61962]: INFO nova.compute.manager [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Terminating instance [ 1625.258472] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquiring lock "refresh_cache-d970d822-12ab-43cc-8d34-abc6eff8a68d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.258629] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Acquired lock "refresh_cache-d970d822-12ab-43cc-8d34-abc6eff8a68d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.258792] env[61962]: DEBUG nova.network.neutron [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1625.269480] env[61962]: DEBUG nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1625.293176] env[61962]: DEBUG nova.network.neutron [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1625.334990] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.335257] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.336755] env[61962]: INFO nova.compute.claims [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1625.416357] env[61962]: DEBUG nova.network.neutron [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.427302] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Releasing lock "refresh_cache-d970d822-12ab-43cc-8d34-abc6eff8a68d" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.427750] env[61962]: DEBUG nova.compute.manager [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1625.427944] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1625.428667] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2adfeab-cc58-4022-af94-8bd8a36a31e3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.442837] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c8c199-48de-4477-8ad6-e95c9af50b1d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.484063] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d970d822-12ab-43cc-8d34-abc6eff8a68d could not be found. [ 1625.484063] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1625.484063] env[61962]: INFO nova.compute.manager [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1625.484316] env[61962]: DEBUG oslo.service.loopingcall [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.486990] env[61962]: DEBUG nova.compute.manager [-] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1625.487125] env[61962]: DEBUG nova.network.neutron [-] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1625.593571] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a0e602-a3dc-4e48-af9e-45134a0d4054 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.601193] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d461868a-90ef-4f89-9fd9-6b43915c8eb6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.640277] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c1653c-6da2-46a7-bfd2-9e6d29be545e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.643089] env[61962]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61962) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1625.643338] env[61962]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1625.644863] env[61962]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-2657af99-2fe1-4370-8454-3dd8be141cc1'] [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1625.644863] env[61962]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1625.645510] env[61962]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1625.645510] env[61962]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1625.646018] env[61962]: ERROR oslo.service.loopingcall [ 1625.646479] env[61962]: ERROR nova.compute.manager [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
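Annotation: the traceback above shows every neutronclient call passing through a wrapper (nova/network/neutron.py, the repeated "line 196, in wrapper" frames) that converts a 401 from the admin client into NeutronAdminCredentialConfigurationInvalid, which the surrounding looping call does not treat as retryable, so deallocation fails outright. A condensed sketch of that translation pattern; the exception classes here are stand-ins for the real ones:

    import functools

    class Unauthorized(Exception):
        pass

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        pass

    def translate_client_exceptions(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized:
                # A 401 on admin credentials is a deployment problem, not a
                # transient fault: surface it as a configuration error so
                # callers (including retry loops) fail fast instead of looping.
                raise NeutronAdminCredentialConfigurationInvalid()
        return wrapper

    @translate_client_exceptions
    def list_ports(client, **search_opts):
        # Mirrors the deallocate path above: list the instance's ports so
        # they can be deleted; the wrapper handles auth failures uniformly.
        return client.list_ports(**search_opts)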
[ 1625.653965] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f468495b-1acd-446a-a3ad-81c46894e357 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.669554] env[61962]: DEBUG nova.compute.provider_tree [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1625.680156] env[61962]: DEBUG nova.scheduler.client.report [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1625.684654] env[61962]: ERROR nova.compute.manager [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1625.684654] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Traceback (most recent call last): [ 1625.684654] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.684654] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] ret = obj(*args, **kwargs) [ 1625.684654] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1625.684654] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] exception_handler_v20(status_code, error_body) [ 1625.684654] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1625.684654] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] raise client_exc(message=error_message, [ 1625.684654] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1625.684654] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Neutron server returns request_ids: ['req-2657af99-2fe1-4370-8454-3dd8be141cc1'] [ 1625.684654] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] During handling of the above exception, another exception occurred: [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Traceback (most recent call last): [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self._delete_instance(context, instance, bdms) [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self._shutdown_instance(context, instance, bdms) [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self._try_deallocate_network(context, instance, requested_networks) [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] with excutils.save_and_reraise_exception(): [ 1625.685115] env[61962]: ERROR 
nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1625.685115] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self.force_reraise() [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] raise self.value [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] _deallocate_network_with_retries() [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return evt.wait() [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] result = hub.switch() [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self.greenlet.switch() [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1625.685585] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] result = func(*self.args, **self.kw) [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] result = f(*args, **kwargs) [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self._deallocate_network( [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self.network_api.deallocate_for_instance( [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: 
d970d822-12ab-43cc-8d34-abc6eff8a68d] data = neutron.list_ports(**search_opts) [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] ret = obj(*args, **kwargs) [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self.list('ports', self.ports_path, retrieve_all, [ 1625.686069] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] ret = obj(*args, **kwargs) [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] for r in self._pagination(collection, path, **params): [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] res = self.get(path, params=params) [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] ret = obj(*args, **kwargs) [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self.retry_request("GET", action, body=body, [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] ret = obj(*args, **kwargs) [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1625.686703] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] return self.do_request(method, action, body=body, [ 1625.687108] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.687108] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] ret = obj(*args, **kwargs) [ 1625.687108] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1625.687108] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] self._handle_fault_response(status_code, replybody, resp) [ 1625.687108] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1625.687108] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1625.687108] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1625.687108] env[61962]: ERROR nova.compute.manager [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] [ 1625.693664] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.358s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.694184] env[61962]: DEBUG nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1625.713954] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Lock "d970d822-12ab-43cc-8d34-abc6eff8a68d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.461s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.730551] env[61962]: DEBUG nova.compute.utils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1625.733111] env[61962]: DEBUG nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Allocating IP information in the background. 
{{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1625.733111] env[61962]: DEBUG nova.network.neutron [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1625.744945] env[61962]: DEBUG nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1625.799169] env[61962]: INFO nova.compute.manager [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] [instance: d970d822-12ab-43cc-8d34-abc6eff8a68d] Successfully reverted task state from None on failure for instance. [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server [None req-e6a2f4e5-1dbe-4445-8444-ef14363e60d0 tempest-ServerShowV254Test-238986200 tempest-ServerShowV254Test-238986200-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-2657af99-2fe1-4370-8454-3dd8be141cc1'] [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1625.805526] env[61962]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1625.806077] env[61962]: ERROR 
oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server raise self.value [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1625.806077] env[61962]: ERROR oslo_messaging.rpc.server raise self.value [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server raise self.value [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1625.806563] env[61962]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server raise self.value [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server raise self.value [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1625.807015] env[61962]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1625.807463] env[61962]: ERROR 
oslo_messaging.rpc.server return self.greenlet.switch() [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.807463] env[61962]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1625.807897] 
env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1625.807897] env[61962]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1625.808350] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1625.808350] env[61962]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1625.808350] env[61962]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1625.808350] env[61962]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1625.808350] env[61962]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1625.808350] env[61962]: ERROR oslo_messaging.rpc.server [ 1625.815604] env[61962]: DEBUG nova.policy [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68caadc6bfc440da9921d99309e9dce0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56bb35c701074f02820d93ad5911d089', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1625.829192] env[61962]: DEBUG nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1625.857162] env[61962]: DEBUG nova.virt.hardware [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1625.857436] env[61962]: DEBUG nova.virt.hardware [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1625.857619] env[61962]: DEBUG nova.virt.hardware [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1625.857808] env[61962]: DEBUG nova.virt.hardware [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1625.857954] env[61962]: DEBUG nova.virt.hardware [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1625.858117] env[61962]: DEBUG nova.virt.hardware [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1625.858330] env[61962]: DEBUG nova.virt.hardware [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1625.858491] env[61962]: DEBUG nova.virt.hardware [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1625.858660] 
env[61962]: DEBUG nova.virt.hardware [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1625.858823] env[61962]: DEBUG nova.virt.hardware [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1625.859013] env[61962]: DEBUG nova.virt.hardware [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1625.860076] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cc2650-5618-422a-a78f-475dce0560b0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.869281] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6586d150-a8c6-4831-a3c7-7f97f02ded06 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.017185] env[61962]: DEBUG nova.compute.manager [req-88c5bf63-243c-4748-98b0-9f85365e8a1a req-0c975304-9c3d-4218-9018-8a9b10ab779c service nova] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Received event network-vif-plugged-28222adf-fbb7-4199-a9e2-1f3e4d389e4d {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1626.017423] env[61962]: DEBUG oslo_concurrency.lockutils [req-88c5bf63-243c-4748-98b0-9f85365e8a1a req-0c975304-9c3d-4218-9018-8a9b10ab779c service nova] Acquiring lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.017641] env[61962]: DEBUG oslo_concurrency.lockutils [req-88c5bf63-243c-4748-98b0-9f85365e8a1a req-0c975304-9c3d-4218-9018-8a9b10ab779c service nova] Lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.017839] env[61962]: DEBUG oslo_concurrency.lockutils [req-88c5bf63-243c-4748-98b0-9f85365e8a1a req-0c975304-9c3d-4218-9018-8a9b10ab779c service nova] Lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.018016] env[61962]: DEBUG nova.compute.manager [req-88c5bf63-243c-4748-98b0-9f85365e8a1a req-0c975304-9c3d-4218-9018-8a9b10ab779c service nova] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] No waiting events found dispatching network-vif-plugged-28222adf-fbb7-4199-a9e2-1f3e4d389e4d {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1626.018242] env[61962]: WARNING 
nova.compute.manager [req-88c5bf63-243c-4748-98b0-9f85365e8a1a req-0c975304-9c3d-4218-9018-8a9b10ab779c service nova] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Received unexpected event network-vif-plugged-28222adf-fbb7-4199-a9e2-1f3e4d389e4d for instance with vm_state building and task_state spawning. [ 1626.075713] env[61962]: DEBUG nova.network.neutron [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Successfully updated port: 28222adf-fbb7-4199-a9e2-1f3e4d389e4d {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1626.089094] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "refresh_cache-1e71cc99-f026-4197-b7f1-ae18f7f99af6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.089309] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquired lock "refresh_cache-1e71cc99-f026-4197-b7f1-ae18f7f99af6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.089408] env[61962]: DEBUG nova.network.neutron [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1626.137752] env[61962]: DEBUG nova.network.neutron [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1626.230952] env[61962]: DEBUG nova.network.neutron [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Successfully created port: 9436547d-6e92-4a1e-b13b-11a30db7a7b3 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1626.331502] env[61962]: DEBUG nova.network.neutron [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Updating instance_info_cache with network_info: [{"id": "28222adf-fbb7-4199-a9e2-1f3e4d389e4d", "address": "fa:16:3e:33:e7:5d", "network": {"id": "9fb6d971-d29d-42af-a8df-f5bdda212cf4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1248701816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56bb35c701074f02820d93ad5911d089", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28222adf-fb", "ovs_interfaceid": "28222adf-fbb7-4199-a9e2-1f3e4d389e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.350370] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Releasing lock "refresh_cache-1e71cc99-f026-4197-b7f1-ae18f7f99af6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.350911] env[61962]: DEBUG nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Instance network_info: |[{"id": "28222adf-fbb7-4199-a9e2-1f3e4d389e4d", "address": "fa:16:3e:33:e7:5d", "network": {"id": "9fb6d971-d29d-42af-a8df-f5bdda212cf4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1248701816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56bb35c701074f02820d93ad5911d089", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28222adf-fb", "ovs_interfaceid": "28222adf-fbb7-4199-a9e2-1f3e4d389e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1626.351650] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:e7:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7894814c-6be3-4b80-a08e-4a771bc05dd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28222adf-fbb7-4199-a9e2-1f3e4d389e4d', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1626.360182] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Creating folder: Project (56bb35c701074f02820d93ad5911d089). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1626.360885] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eba414cf-0d41-4929-9729-31865b93cb87 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.371973] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Created folder: Project (56bb35c701074f02820d93ad5911d089) in parent group-v953327. [ 1626.372208] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Creating folder: Instances. Parent ref: group-v953417. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1626.372837] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b0ae746-37eb-4866-a43c-8d50725465f7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.382439] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Created folder: Instances in parent group-v953417. [ 1626.382699] env[61962]: DEBUG oslo.service.loopingcall [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1626.382897] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1626.383133] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7949d050-5691-4608-b06d-efc494aa69e4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.404588] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1626.404588] env[61962]: value = "task-4892065" [ 1626.404588] env[61962]: _type = "Task" [ 1626.404588] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.412548] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892065, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.822959] env[61962]: DEBUG nova.compute.manager [req-0b5d8db5-b08c-45f0-9a1f-b1522aa4cb7d req-eba5ca30-d63f-4410-8bde-838c9b48f149 service nova] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Received event network-vif-plugged-9436547d-6e92-4a1e-b13b-11a30db7a7b3 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1626.823199] env[61962]: DEBUG oslo_concurrency.lockutils [req-0b5d8db5-b08c-45f0-9a1f-b1522aa4cb7d req-eba5ca30-d63f-4410-8bde-838c9b48f149 service nova] Acquiring lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.823403] env[61962]: DEBUG oslo_concurrency.lockutils [req-0b5d8db5-b08c-45f0-9a1f-b1522aa4cb7d req-eba5ca30-d63f-4410-8bde-838c9b48f149 service nova] Lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.823569] env[61962]: DEBUG oslo_concurrency.lockutils [req-0b5d8db5-b08c-45f0-9a1f-b1522aa4cb7d req-eba5ca30-d63f-4410-8bde-838c9b48f149 service nova] Lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.823730] env[61962]: DEBUG nova.compute.manager [req-0b5d8db5-b08c-45f0-9a1f-b1522aa4cb7d req-eba5ca30-d63f-4410-8bde-838c9b48f149 service nova] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] No waiting events found dispatching network-vif-plugged-9436547d-6e92-4a1e-b13b-11a30db7a7b3 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1626.823892] env[61962]: WARNING nova.compute.manager [req-0b5d8db5-b08c-45f0-9a1f-b1522aa4cb7d req-eba5ca30-d63f-4410-8bde-838c9b48f149 service nova] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Received unexpected event network-vif-plugged-9436547d-6e92-4a1e-b13b-11a30db7a7b3 for instance with vm_state building and task_state spawning. 
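The "Waiting for the task ... to complete" block above is oslo.vmware's task-polling loop: invoke_api() submits a vSphere method such as Folder.CreateVM_Task and returns a Task managed object, and wait_for_task() then polls Task.info (the "progress is 0%" records) until it reports success or error. A minimal sketch of that call pattern follows; the connection values and the folder/config/pool references are placeholders, not values from this log, and the call only works against a live vCenter.

from oslo_vmware import api

# Placeholder connection parameters; a real deployment reads these from config.
session = api.VMwareAPISession('vc.example.test', 'svc-user', 'secret',
                               10,    # api_retry_count
                               0.5)   # task_poll_interval, in seconds

# Placeholders: in nova these are managed-object refs and a VM config spec
# built from flavor/image data before the CreateVM_Task call.
folder_ref = config_spec = respool_ref = None

# Submit the vSphere call and block until the returned Task finishes;
# wait_for_task() raises if Task.info ends in the error state.
task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                          config=config_spec, pool=respool_ref)
session.wait_for_task(task)

This split between submitting the task and waiting on it is why the log shows a separate "Invoking Folder.CreateVM_Task" record, then the wait/poll records, and only later the "completed successfully" record with a duration_secs value.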
[ 1626.912782] env[61962]: DEBUG nova.network.neutron [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Successfully updated port: 9436547d-6e92-4a1e-b13b-11a30db7a7b3 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1626.917201] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892065, 'name': CreateVM_Task, 'duration_secs': 0.328852} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.917582] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1626.918264] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.918440] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.918776] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1626.919232] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3e4f5e3-0a9d-4535-8484-dd350bee62b5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.925096] env[61962]: DEBUG oslo_vmware.api [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for the task: (returnval){ [ 1626.925096] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52373b06-d613-1f46-da4a-af51eaa3845f" [ 1626.925096] env[61962]: _type = "Task" [ 1626.925096] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.926316] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "refresh_cache-2cf5409f-eaff-4b63-b8bb-eb15a7a81760" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.926468] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquired lock "refresh_cache-2cf5409f-eaff-4b63-b8bb-eb15a7a81760" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.926617] env[61962]: DEBUG nova.network.neutron [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1626.940178] env[61962]: DEBUG oslo_vmware.api [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52373b06-d613-1f46-da4a-af51eaa3845f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.981077] env[61962]: DEBUG nova.network.neutron [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1627.174561] env[61962]: DEBUG nova.network.neutron [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Updating instance_info_cache with network_info: [{"id": "9436547d-6e92-4a1e-b13b-11a30db7a7b3", "address": "fa:16:3e:78:c6:4d", "network": {"id": "9fb6d971-d29d-42af-a8df-f5bdda212cf4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1248701816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56bb35c701074f02820d93ad5911d089", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9436547d-6e", "ovs_interfaceid": "9436547d-6e92-4a1e-b13b-11a30db7a7b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.189341] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Releasing lock "refresh_cache-2cf5409f-eaff-4b63-b8bb-eb15a7a81760" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.189661] env[61962]: DEBUG nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Instance network_info: |[{"id": "9436547d-6e92-4a1e-b13b-11a30db7a7b3", "address": "fa:16:3e:78:c6:4d", "network": {"id": "9fb6d971-d29d-42af-a8df-f5bdda212cf4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1248701816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56bb35c701074f02820d93ad5911d089", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9436547d-6e", "ovs_interfaceid": "9436547d-6e92-4a1e-b13b-11a30db7a7b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1627.190251] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:c6:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7894814c-6be3-4b80-a08e-4a771bc05dd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9436547d-6e92-4a1e-b13b-11a30db7a7b3', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1627.200017] env[61962]: DEBUG oslo.service.loopingcall [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1627.200179] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1627.200373] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c32a718-fb1a-43fc-b16b-dfff942df353 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.225317] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1627.225317] env[61962]: value = "task-4892066" [ 1627.225317] env[61962]: _type = "Task" [ 1627.225317] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.234437] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892066, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.437956] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.438330] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1627.438556] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.735749] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892066, 'name': CreateVM_Task, 'duration_secs': 0.307787} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.735932] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1627.736600] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.736734] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.737044] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1627.737288] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66fbb938-9b01-44a9-931d-3aa3708ec870 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.742171] env[61962]: DEBUG oslo_vmware.api [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for the task: (returnval){ [ 1627.742171] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]522225d2-756a-acbd-9e3a-28113a6d8a05" [ 1627.742171] env[61962]: _type = "Task" [ 1627.742171] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.750413] env[61962]: DEBUG oslo_vmware.api [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]522225d2-756a-acbd-9e3a-28113a6d8a05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.044012] env[61962]: DEBUG nova.compute.manager [req-9c9c9e81-622d-475c-bfcf-2b372a34072c req-66497f23-44ce-4fd1-b7a6-bc19b907d9c8 service nova] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Received event network-changed-28222adf-fbb7-4199-a9e2-1f3e4d389e4d {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1628.044240] env[61962]: DEBUG nova.compute.manager [req-9c9c9e81-622d-475c-bfcf-2b372a34072c req-66497f23-44ce-4fd1-b7a6-bc19b907d9c8 service nova] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Refreshing instance network info cache due to event network-changed-28222adf-fbb7-4199-a9e2-1f3e4d389e4d. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1628.044464] env[61962]: DEBUG oslo_concurrency.lockutils [req-9c9c9e81-622d-475c-bfcf-2b372a34072c req-66497f23-44ce-4fd1-b7a6-bc19b907d9c8 service nova] Acquiring lock "refresh_cache-1e71cc99-f026-4197-b7f1-ae18f7f99af6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.044611] env[61962]: DEBUG oslo_concurrency.lockutils [req-9c9c9e81-622d-475c-bfcf-2b372a34072c req-66497f23-44ce-4fd1-b7a6-bc19b907d9c8 service nova] Acquired lock "refresh_cache-1e71cc99-f026-4197-b7f1-ae18f7f99af6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.044769] env[61962]: DEBUG nova.network.neutron [req-9c9c9e81-622d-475c-bfcf-2b372a34072c req-66497f23-44ce-4fd1-b7a6-bc19b907d9c8 service nova] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Refreshing network info cache for port 28222adf-fbb7-4199-a9e2-1f3e4d389e4d {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1628.253117] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.253391] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1628.253602] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.332055] env[61962]: DEBUG nova.network.neutron [req-9c9c9e81-622d-475c-bfcf-2b372a34072c req-66497f23-44ce-4fd1-b7a6-bc19b907d9c8 service nova] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Updated VIF entry in instance network info cache for port 28222adf-fbb7-4199-a9e2-1f3e4d389e4d. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1628.332427] env[61962]: DEBUG nova.network.neutron [req-9c9c9e81-622d-475c-bfcf-2b372a34072c req-66497f23-44ce-4fd1-b7a6-bc19b907d9c8 service nova] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Updating instance_info_cache with network_info: [{"id": "28222adf-fbb7-4199-a9e2-1f3e4d389e4d", "address": "fa:16:3e:33:e7:5d", "network": {"id": "9fb6d971-d29d-42af-a8df-f5bdda212cf4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1248701816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56bb35c701074f02820d93ad5911d089", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28222adf-fb", "ovs_interfaceid": "28222adf-fbb7-4199-a9e2-1f3e4d389e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.344621] env[61962]: DEBUG oslo_concurrency.lockutils [req-9c9c9e81-622d-475c-bfcf-2b372a34072c req-66497f23-44ce-4fd1-b7a6-bc19b907d9c8 service nova] Releasing lock "refresh_cache-1e71cc99-f026-4197-b7f1-ae18f7f99af6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.854983] env[61962]: DEBUG nova.compute.manager [req-e05a22dd-8656-4964-8251-10ac2c26626a req-2a88c773-e4bd-478b-8cdc-0cada73aea8b service nova] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Received event network-changed-9436547d-6e92-4a1e-b13b-11a30db7a7b3 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1628.855234] env[61962]: DEBUG nova.compute.manager [req-e05a22dd-8656-4964-8251-10ac2c26626a req-2a88c773-e4bd-478b-8cdc-0cada73aea8b service nova] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Refreshing instance network info cache due to event network-changed-9436547d-6e92-4a1e-b13b-11a30db7a7b3. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1628.855555] env[61962]: DEBUG oslo_concurrency.lockutils [req-e05a22dd-8656-4964-8251-10ac2c26626a req-2a88c773-e4bd-478b-8cdc-0cada73aea8b service nova] Acquiring lock "refresh_cache-2cf5409f-eaff-4b63-b8bb-eb15a7a81760" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.855653] env[61962]: DEBUG oslo_concurrency.lockutils [req-e05a22dd-8656-4964-8251-10ac2c26626a req-2a88c773-e4bd-478b-8cdc-0cada73aea8b service nova] Acquired lock "refresh_cache-2cf5409f-eaff-4b63-b8bb-eb15a7a81760" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.855812] env[61962]: DEBUG nova.network.neutron [req-e05a22dd-8656-4964-8251-10ac2c26626a req-2a88c773-e4bd-478b-8cdc-0cada73aea8b service nova] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Refreshing network info cache for port 9436547d-6e92-4a1e-b13b-11a30db7a7b3 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1629.117700] env[61962]: DEBUG nova.network.neutron [req-e05a22dd-8656-4964-8251-10ac2c26626a req-2a88c773-e4bd-478b-8cdc-0cada73aea8b service nova] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Updated VIF entry in instance network info cache for port 9436547d-6e92-4a1e-b13b-11a30db7a7b3. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1629.118165] env[61962]: DEBUG nova.network.neutron [req-e05a22dd-8656-4964-8251-10ac2c26626a req-2a88c773-e4bd-478b-8cdc-0cada73aea8b service nova] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Updating instance_info_cache with network_info: [{"id": "9436547d-6e92-4a1e-b13b-11a30db7a7b3", "address": "fa:16:3e:78:c6:4d", "network": {"id": "9fb6d971-d29d-42af-a8df-f5bdda212cf4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1248701816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56bb35c701074f02820d93ad5911d089", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9436547d-6e", "ovs_interfaceid": "9436547d-6e92-4a1e-b13b-11a30db7a7b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.129160] env[61962]: DEBUG oslo_concurrency.lockutils [req-e05a22dd-8656-4964-8251-10ac2c26626a req-2a88c773-e4bd-478b-8cdc-0cada73aea8b service nova] Releasing lock "refresh_cache-2cf5409f-eaff-4b63-b8bb-eb15a7a81760" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.158627] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1647.170976] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.171250] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.171420] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.171579] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1647.172722] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57f1f42-5b67-4af2-8a9f-a0a36c9721f2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.183348] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a82b106-c818-453c-8fd6-8f8559f31726 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.198432] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7d451a-c584-4e42-a3f4-90c24f2c6589 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.205460] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645bdedd-12c4-4eab-b834-f856f1ca1df2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.235022] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180532MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1647.235252] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.235392] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.310169] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1647.310334] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1647.310461] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1647.310581] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1647.310699] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3d2749f8-18d2-4631-adc2-093988125eb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1647.310815] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1035139f-2193-4d1c-a220-369ef6096ae0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1647.310928] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53048279-c3ea-484f-9a3e-049b12892f7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1647.311053] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1647.311168] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1647.311280] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1647.322926] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3514756-a861-4662-b859-0881ced78e19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1647.334156] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 02c198ea-61de-4e70-866c-39781d353d04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1647.334358] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1647.334606] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '74', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_a9c2add945d24a299e5bb0fe543deec3': '1', 'io_workload': '10', 'num_proj_e816704de44649099e96127498eed7e9': '1', 'num_proj_eaa1993aea5f4f039f35b32e2321e30f': '1', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_34477c98bf9c4a87971a62cb96b072c7': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_88a7b37d54f047149b05fc80e0b51601': '1', 'num_proj_40bdd99e47aa40a28edd38c6b4a09a5f': '1', 'num_task_spawning': '2', 'num_proj_56bb35c701074f02820d93ad5911d089': '2'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1647.488952] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05c865c-6e18-4224-ae9d-ae2f6d56066c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.496898] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a0ee86-97ab-4874-b850-7379e31e49fb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.527158] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677526c1-750d-46e3-8ee1-fc48654a3f2d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.534582] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256012cf-83d4-4b12-a8cd-fc4a930e5fb6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.547615] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1647.556304] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1647.572110] env[61962]: DEBUG nova.compute.resource_tracker [None 
req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1647.572291] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.337s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.303470] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.572598] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1651.572854] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1653.154575] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1656.158989] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1657.159418] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1657.159655] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1657.159757] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1657.181444] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1657.181630] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1657.181747] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1657.181987] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1657.182193] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1657.182292] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1657.182395] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1657.182549] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1657.182656] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1657.182762] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1657.182875] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1657.183441] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1661.159440] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1662.159103] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1664.158708] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.715402] env[61962]: WARNING oslo_vmware.rw_handles [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1672.715402] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1672.715402] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1672.715402] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1672.715402] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1672.715402] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1672.715402] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1672.715402] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1672.715402] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1672.715402] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1672.715402] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1672.715402] env[61962]: ERROR oslo_vmware.rw_handles [ 1672.716187] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/ed91b0c2-226a-4b49-8794-2958a9d7ebab/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1672.717847] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 
35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1672.718117] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Copying Virtual Disk [datastore2] vmware_temp/ed91b0c2-226a-4b49-8794-2958a9d7ebab/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/ed91b0c2-226a-4b49-8794-2958a9d7ebab/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1672.718426] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d7044f7-dad5-4058-8465-b5c9ea9a3798 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.726966] env[61962]: DEBUG oslo_vmware.api [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Waiting for the task: (returnval){ [ 1672.726966] env[61962]: value = "task-4892067" [ 1672.726966] env[61962]: _type = "Task" [ 1672.726966] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.735868] env[61962]: DEBUG oslo_vmware.api [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Task: {'id': task-4892067, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.237312] env[61962]: DEBUG oslo_vmware.exceptions [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1673.237595] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.238169] env[61962]: ERROR nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1673.238169] env[61962]: Faults: ['InvalidArgument'] [ 1673.238169] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Traceback (most recent call last): [ 1673.238169] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1673.238169] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] yield resources [ 1673.238169] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1673.238169] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] self.driver.spawn(context, instance, image_meta, [ 1673.238169] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1673.238169] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1673.238169] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1673.238169] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] self._fetch_image_if_missing(context, vi) [ 1673.238169] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] image_cache(vi, tmp_image_ds_loc) [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] vm_util.copy_virtual_disk( [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] session._wait_for_task(vmdk_copy_task) [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] return self.wait_for_task(task_ref) [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] return evt.wait() [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] result = hub.switch() [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1673.238592] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] return self.greenlet.switch() [ 1673.238995] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1673.238995] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] self.f(*self.args, **self.kw) [ 1673.238995] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1673.238995] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] raise exceptions.translate_fault(task_info.error) [ 1673.238995] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1673.238995] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Faults: ['InvalidArgument'] [ 1673.238995] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] [ 1673.238995] env[61962]: INFO nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Terminating instance [ 1673.240133] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.240354] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1673.240595] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
with opID=oslo.vmware-4e124894-145b-42de-8d6b-8556adf75422 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.244058] env[61962]: DEBUG nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1673.244254] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1673.244974] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14616a1f-50f5-4953-b2bc-34375823bde4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.252327] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1673.252564] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bf08a19-1a66-4c0f-825d-a8613b563332 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.254975] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1673.255155] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1673.256179] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c858d7c-8ffc-4625-8bba-c9dd0fe9bff7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.261698] env[61962]: DEBUG oslo_vmware.api [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Waiting for the task: (returnval){ [ 1673.261698] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52cb13d2-2e49-df87-25dc-b8c3b8b6181d" [ 1673.261698] env[61962]: _type = "Task" [ 1673.261698] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.270187] env[61962]: DEBUG oslo_vmware.api [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52cb13d2-2e49-df87-25dc-b8c3b8b6181d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.336620] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1673.336886] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1673.337021] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Deleting the datastore file [datastore2] 35bf25a7-a3c2-40f1-b415-42d6167b0a1c {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1673.337301] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-839bad50-baa3-4242-8bd4-8e39aa5ff443 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.344454] env[61962]: DEBUG oslo_vmware.api [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Waiting for the task: (returnval){ [ 1673.344454] env[61962]: value = "task-4892069" [ 1673.344454] env[61962]: _type = "Task" [ 1673.344454] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.352660] env[61962]: DEBUG oslo_vmware.api [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Task: {'id': task-4892069, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.772865] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1673.773253] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Creating directory with path [datastore2] vmware_temp/17bbf8bd-158c-4f73-a42f-38f109c632ad/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1673.773395] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4326cbe6-af7f-4000-ad7f-9bd6aafeffd0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.785586] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Created directory with path [datastore2] vmware_temp/17bbf8bd-158c-4f73-a42f-38f109c632ad/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1673.785783] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Fetch image to [datastore2] vmware_temp/17bbf8bd-158c-4f73-a42f-38f109c632ad/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1673.785963] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/17bbf8bd-158c-4f73-a42f-38f109c632ad/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1673.786751] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f239f7a-687c-40b0-bc27-b8a52d70f188 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.793947] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e5c038-b655-4805-a4be-82ed3d73f304 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.803289] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84253d83-65d5-492f-b235-7458cbbf4142 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1673.834726] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8640d405-fea4-4df3-a44f-9ce3b53c2dae {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.842374] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-42a2db75-ca37-42d7-99e1-9e50e97df86e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.853287] env[61962]: DEBUG oslo_vmware.api [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Task: {'id': task-4892069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077986} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.853544] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1673.853696] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1673.853914] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1673.854135] env[61962]: INFO nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Took 0.61 seconds to destroy the instance on the hypervisor. 
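[Editor's note] The recurring "Waiting for the task: (returnval){ value = "task-..." } to complete" / "progress is 0%" / "completed successfully" sequences above come from oslo.vmware's wait_for_task polling (api.py:397) driving _poll_task (api.py:434/444), which raises a translated fault on an error state (api.py:448), as seen in the CopyVirtualDisk_Task failure. Below is a minimal, self-contained Python sketch of that poll-until-done shape. All names here (TaskInfo, fetch_task_info, VimFault) are hypothetical stand-ins for illustration only, not the real oslo.vmware API.

# Hedged sketch of the poll-until-done loop the wait_for_task /
# _poll_task log entries trace. Hypothetical stand-in names; not
# the oslo.vmware implementation.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str              # 'running' | 'success' | 'error'
    progress: int = 0
    error: str | None = None

_polls = {"count": 0}

def fetch_task_info(task_ref: str) -> TaskInfo:
    # Stand-in for the server round trip; here it simply reports
    # success on the third poll so the example runs to completion.
    _polls["count"] += 1
    if _polls["count"] < 3:
        return TaskInfo(state="running", progress=_polls["count"] * 30)
    return TaskInfo(state="success", progress=100)

class VimFault(Exception):
    """Stand-in for a translated VimFaultException (e.g. InvalidArgument)."""

def wait_for_task(task_ref: str, interval: float = 0.5) -> TaskInfo:
    # Poll until the task succeeds; raise on an error state, which is
    # the analogue of _poll_task raising translate_fault(task_info.error).
    while True:
        info = fetch_task_info(task_ref)
        if info.state == "success":
            return info
        if info.state == "error":
            raise VimFault(info.error)
        print(f"Task: {task_ref} progress is {info.progress}%.")
        time.sleep(interval)

if __name__ == "__main__":
    wait_for_task("task-4892069")

Under these assumptions the loop prints two progress lines and returns, mirroring the DEBUG progress entries followed by "completed successfully" in the log; an error state instead surfaces as an exception, which is the path taken by the CopyVirtualDisk_Task that ends in VimFaultException below.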
[ 1673.856652] env[61962]: DEBUG nova.compute.claims [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1673.856804] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.857043] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.868914] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1673.923762] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/17bbf8bd-158c-4f73-a42f-38f109c632ad/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1673.984267] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1673.984451] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/17bbf8bd-158c-4f73-a42f-38f109c632ad/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1674.102451] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e00bb3-ece4-4463-9995-a3cded1c664e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.113244] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37da5e59-001c-4676-a23c-3df675ff2238 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.145784] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9026376c-7225-40d4-98c6-72253ef2ab8f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.154177] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6df02e4-b651-41c9-8b8b-5b5c669ef0ca {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.167979] env[61962]: DEBUG nova.compute.provider_tree [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1674.177608] env[61962]: DEBUG nova.scheduler.client.report [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1674.194643] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.337s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.195289] env[61962]: ERROR nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1674.195289] env[61962]: Faults: ['InvalidArgument'] [ 1674.195289] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Traceback (most recent call last): [ 1674.195289] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1674.195289] env[61962]: ERROR 
nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] self.driver.spawn(context, instance, image_meta, [ 1674.195289] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1674.195289] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1674.195289] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1674.195289] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] self._fetch_image_if_missing(context, vi) [ 1674.195289] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1674.195289] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] image_cache(vi, tmp_image_ds_loc) [ 1674.195289] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] vm_util.copy_virtual_disk( [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] session._wait_for_task(vmdk_copy_task) [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] return self.wait_for_task(task_ref) [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] return evt.wait() [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] result = hub.switch() [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] return self.greenlet.switch() [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1674.195624] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] self.f(*self.args, **self.kw) [ 1674.195956] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1674.195956] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] raise exceptions.translate_fault(task_info.error) [ 1674.195956] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1674.195956] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Faults: ['InvalidArgument'] [ 1674.195956] env[61962]: ERROR nova.compute.manager [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] [ 1674.196106] env[61962]: DEBUG nova.compute.utils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1674.197953] env[61962]: DEBUG nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Build of instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c was re-scheduled: A specified parameter was not correct: fileType [ 1674.197953] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1674.198382] env[61962]: DEBUG nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1674.198561] env[61962]: DEBUG nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1674.198732] env[61962]: DEBUG nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1674.198897] env[61962]: DEBUG nova.network.neutron [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1674.620750] env[61962]: DEBUG nova.network.neutron [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.636169] env[61962]: INFO nova.compute.manager [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Took 0.44 seconds to deallocate network for instance. [ 1674.738051] env[61962]: INFO nova.scheduler.client.report [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Deleted allocations for instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c [ 1674.760928] env[61962]: DEBUG oslo_concurrency.lockutils [None req-673cd958-9a31-4e2b-aba4-288eb9a7a931 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 616.678s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.762697] env[61962]: DEBUG oslo_concurrency.lockutils [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 421.189s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.763198] env[61962]: DEBUG oslo_concurrency.lockutils [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Acquiring lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.763198] env[61962]: DEBUG oslo_concurrency.lockutils [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.763402] env[61962]: DEBUG oslo_concurrency.lockutils [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.766222] env[61962]: INFO nova.compute.manager [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Terminating instance [ 1674.768288] env[61962]: DEBUG nova.compute.manager [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1674.768514] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1674.768741] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f2581ea-a4c3-4578-ad47-64c5f9a431ce {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.779439] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd250b4-82d8-4117-9d14-f954c4614bce {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.791410] env[61962]: DEBUG nova.compute.manager [None req-094311a2-9f80-4d6b-9521-9fcfd74b3cac tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: c3514756-a861-4662-b859-0881ced78e19] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1674.816423] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 35bf25a7-a3c2-40f1-b415-42d6167b0a1c could not be found. [ 1674.816787] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1674.817025] env[61962]: INFO nova.compute.manager [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Took 0.05 seconds to destroy the instance on the hypervisor. 
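The 616.678s/421.189s handoff above is the per-instance lock serializing build and terminate: do_terminate_instance queued behind the re-scheduled build holding the lock on the same UUID. A minimal sketch of the oslo.concurrency pattern that produces the Acquiring / acquired (waited ...) / released (held ...) lines, with lockutils.synchronized standing in for Nova's own wrapper:

```python
from oslo_concurrency import lockutils

INSTANCE_UUID = '35bf25a7-a3c2-40f1-b415-42d6167b0a1c'  # UUID from the entries above

@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # The decorator's inner() wrapper (lockutils.py:402/407/421 in this log)
    # emits the Acquiring / acquired / released DEBUG lines with the waited
    # and held timings; the body only runs while the UUID lock is held, so
    # concurrent build and terminate requests for one instance serialize here.
    pass
```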
[ 1674.817302] env[61962]: DEBUG oslo.service.loopingcall [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1674.817764] env[61962]: DEBUG nova.compute.manager [None req-094311a2-9f80-4d6b-9521-9fcfd74b3cac tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: c3514756-a861-4662-b859-0881ced78e19] Instance disappeared before build. {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1674.819430] env[61962]: DEBUG nova.compute.manager [-] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1674.819430] env[61962]: DEBUG nova.network.neutron [-] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1674.846082] env[61962]: DEBUG oslo_concurrency.lockutils [None req-094311a2-9f80-4d6b-9521-9fcfd74b3cac tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "c3514756-a861-4662-b859-0881ced78e19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.367s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.847714] env[61962]: DEBUG nova.network.neutron [-] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.857294] env[61962]: INFO nova.compute.manager [-] [instance: 35bf25a7-a3c2-40f1-b415-42d6167b0a1c] Took 0.04 seconds to deallocate network for instance. [ 1674.859352] env[61962]: DEBUG nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1674.920948] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.921957] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.923018] env[61962]: INFO nova.compute.claims [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1674.964138] env[61962]: DEBUG oslo_concurrency.lockutils [None req-167bc033-d799-42bb-8ef9-8056818dfa51 tempest-ServersTestManualDisk-1747286721 tempest-ServersTestManualDisk-1747286721-project-member] Lock "35bf25a7-a3c2-40f1-b415-42d6167b0a1c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.201s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.121011] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c480e677-434a-4227-a42b-2b3a3b2cc537 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.129778] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dec170d-a40a-4928-bd26-b36036cf94ac {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.164450] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761ca783-4f98-4c0a-8175-d58ce47cf3b0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.172663] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52521e6e-a048-4b5e-8f9d-77d8dddeecbe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.187162] env[61962]: DEBUG nova.compute.provider_tree [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.196324] env[61962]: DEBUG nova.scheduler.client.report [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1675.211639] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.290s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.212179] env[61962]: DEBUG nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1675.250629] env[61962]: DEBUG nova.compute.utils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1675.251926] env[61962]: DEBUG nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1675.252106] env[61962]: DEBUG nova.network.neutron [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1675.263623] env[61962]: DEBUG nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1675.333210] env[61962]: DEBUG nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Start spawning the instance on the hypervisor. 
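Placement turns an inventory record like the one reported above into schedulable capacity as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick worked check against the reported numbers:

```python
inventory = {  # trimmed to the fields the capacity arithmetic uses
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 96},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity, 'max per allocation:', inv['max_unit'])
# VCPU 192.0 max per allocation: 16        (48 cores oversubscribed 4x)
# MEMORY_MB 196078.0 max per allocation: 65530
# DISK_GB 200.0 max per allocation: 96
```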
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1675.361224] env[61962]: DEBUG nova.virt.hardware [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1675.361479] env[61962]: DEBUG nova.virt.hardware [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1675.361642] env[61962]: DEBUG nova.virt.hardware [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1675.361810] env[61962]: DEBUG nova.virt.hardware [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1675.361951] env[61962]: DEBUG nova.virt.hardware [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1675.362109] env[61962]: DEBUG nova.virt.hardware [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1675.362317] env[61962]: DEBUG nova.virt.hardware [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1675.362475] env[61962]: DEBUG nova.virt.hardware [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1675.362725] env[61962]: DEBUG nova.virt.hardware [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 
tempest-ServersTestJSON-536512511-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1675.362924] env[61962]: DEBUG nova.virt.hardware [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1675.363115] env[61962]: DEBUG nova.virt.hardware [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1675.363977] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01a5626-3894-4175-8b0a-6fee2bc9eb5f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.373431] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b77a00-b574-407a-b881-9362a6f4a615 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.414080] env[61962]: DEBUG nova.policy [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0be80fa75c4c49d899a3dac926c1ed37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc34536d51ef4a22aef0b274ba5aa28e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1675.773089] env[61962]: DEBUG nova.network.neutron [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Successfully created port: 64d0707e-4ffe-4ba3-81ec-1d8b74460dab {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1676.502304] env[61962]: DEBUG nova.network.neutron [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Successfully updated port: 64d0707e-4ffe-4ba3-81ec-1d8b74460dab {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1676.519693] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "refresh_cache-02c198ea-61de-4e70-866c-39781d353d04" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.519750] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired lock "refresh_cache-02c198ea-61de-4e70-866c-39781d353d04" {{(pid=61962) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.519888] env[61962]: DEBUG nova.network.neutron [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1676.580642] env[61962]: DEBUG nova.network.neutron [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Instance cache missing network info. {{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1676.661506] env[61962]: DEBUG nova.compute.manager [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Received event network-vif-plugged-64d0707e-4ffe-4ba3-81ec-1d8b74460dab {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1676.661756] env[61962]: DEBUG oslo_concurrency.lockutils [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] Acquiring lock "02c198ea-61de-4e70-866c-39781d353d04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1676.662031] env[61962]: DEBUG oslo_concurrency.lockutils [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] Lock "02c198ea-61de-4e70-866c-39781d353d04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.662091] env[61962]: DEBUG oslo_concurrency.lockutils [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] Lock "02c198ea-61de-4e70-866c-39781d353d04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.662257] env[61962]: DEBUG nova.compute.manager [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] [instance: 02c198ea-61de-4e70-866c-39781d353d04] No waiting events found dispatching network-vif-plugged-64d0707e-4ffe-4ba3-81ec-1d8b74460dab {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1676.662421] env[61962]: WARNING nova.compute.manager [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Received unexpected event network-vif-plugged-64d0707e-4ffe-4ba3-81ec-1d8b74460dab for instance with vm_state building and task_state spawning. 
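The network-vif-plugged traffic above is Nova's external-event handshake: spawn can register a waiter keyed on (instance, event), and the Neutron-driven notification relayed through external_instance_event pops it; when nothing is registered, the manager logs the "Received unexpected event" WARNING seen here, which is benign while vm_state is still building. A simplified, purely illustrative stand-in for that mechanism (threading.Event in place of Nova's eventlet-based InstanceEvents):

```python
import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        ev = threading.Event()
        self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        # Returns the registered waiter, or None -> "No waiting events found
        # dispatching ..." followed by the unexpected-event WARNING above.
        return self._waiters.pop((instance_uuid, event_name), None)

events = InstanceEvents()
key = ('02c198ea-61de-4e70-866c-39781d353d04',
       'network-vif-plugged-64d0707e-4ffe-4ba3-81ec-1d8b74460dab')
waiter = events.prepare_for_event(*key)   # registered by spawn before power-on
ev = events.pop_instance_event(*key)      # delivered when the event arrives
if ev:
    ev.set()                              # unblocks the spawning thread
```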
[ 1676.662605] env[61962]: DEBUG nova.compute.manager [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Received event network-changed-64d0707e-4ffe-4ba3-81ec-1d8b74460dab {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1676.662772] env[61962]: DEBUG nova.compute.manager [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Refreshing instance network info cache due to event network-changed-64d0707e-4ffe-4ba3-81ec-1d8b74460dab. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1676.662944] env[61962]: DEBUG oslo_concurrency.lockutils [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] Acquiring lock "refresh_cache-02c198ea-61de-4e70-866c-39781d353d04" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.818906] env[61962]: DEBUG nova.network.neutron [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Updating instance_info_cache with network_info: [{"id": "64d0707e-4ffe-4ba3-81ec-1d8b74460dab", "address": "fa:16:3e:cc:79:33", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d0707e-4f", "ovs_interfaceid": "64d0707e-4ffe-4ba3-81ec-1d8b74460dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.832803] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Releasing lock "refresh_cache-02c198ea-61de-4e70-866c-39781d353d04" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.833179] env[61962]: DEBUG nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Instance network_info: |[{"id": "64d0707e-4ffe-4ba3-81ec-1d8b74460dab", "address": "fa:16:3e:cc:79:33", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d0707e-4f", "ovs_interfaceid": "64d0707e-4ffe-4ba3-81ec-1d8b74460dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1676.833501] env[61962]: DEBUG oslo_concurrency.lockutils [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] Acquired lock "refresh_cache-02c198ea-61de-4e70-866c-39781d353d04" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.833683] env[61962]: DEBUG nova.network.neutron [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Refreshing network info cache for port 64d0707e-4ffe-4ba3-81ec-1d8b74460dab {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1676.835122] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:79:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4d3f69a-b086-4c3b-b976-5a848b63dfc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64d0707e-4ffe-4ba3-81ec-1d8b74460dab', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1676.842993] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Creating folder: Project (cc34536d51ef4a22aef0b274ba5aa28e). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1676.843936] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-321b46e2-19bd-48c4-bef5-833c2eb621fa {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.857568] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Created folder: Project (cc34536d51ef4a22aef0b274ba5aa28e) in parent group-v953327. [ 1676.857767] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Creating folder: Instances. Parent ref: group-v953421. 
{{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1676.858015] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-683b3949-0480-486c-9785-ffcee7fdbe19 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.871647] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Created folder: Instances in parent group-v953421. [ 1676.872045] env[61962]: DEBUG oslo.service.loopingcall [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1676.872119] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1676.872446] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f62259c6-eb4f-4067-9d83-a156fc09550d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.894520] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1676.894520] env[61962]: value = "task-4892072" [ 1676.894520] env[61962]: _type = "Task" [ 1676.894520] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.902866] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892072, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.124188] env[61962]: DEBUG nova.network.neutron [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Updated VIF entry in instance network info cache for port 64d0707e-4ffe-4ba3-81ec-1d8b74460dab. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1677.124789] env[61962]: DEBUG nova.network.neutron [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Updating instance_info_cache with network_info: [{"id": "64d0707e-4ffe-4ba3-81ec-1d8b74460dab", "address": "fa:16:3e:cc:79:33", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d0707e-4f", "ovs_interfaceid": "64d0707e-4ffe-4ba3-81ec-1d8b74460dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.135849] env[61962]: DEBUG oslo_concurrency.lockutils [req-f6a24767-1ae7-47a7-960f-432c4239e828 req-a059177d-0957-49d5-8c0a-2fefe0005ae1 service nova] Releasing lock "refresh_cache-02c198ea-61de-4e70-866c-39781d353d04" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.404719] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892072, 'name': CreateVM_Task, 'duration_secs': 0.311876} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.404895] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1677.405513] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.405687] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.406061] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1677.406318] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e9413cc-de87-40c6-909d-a768b3f92dd1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.410965] env[61962]: DEBUG oslo_vmware.api [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 1677.410965] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52fd09cb-bfd2-bcab-e8d8-525cbd9ff3de" [ 1677.410965] env[61962]: _type = "Task" [ 1677.410965] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.419764] env[61962]: DEBUG oslo_vmware.api [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52fd09cb-bfd2-bcab-e8d8-525cbd9ff3de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.922149] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.922149] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1677.922149] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.313487] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "85003695-cb3e-4bce-9a3a-a1d799b36369" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.313716] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "85003695-cb3e-4bce-9a3a-a1d799b36369" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.602589] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "02c198ea-61de-4e70-866c-39781d353d04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.158549] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1707.172353] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.172578] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.172744] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.172908] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1707.174099] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cab7097-680b-4525-85a6-2a1beda52f71 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.183311] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3328251-e4b4-430a-89b6-b9b9ccec6e8c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.199603] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab338e2e-d980-49dd-ac42-b029a4fc6b48 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.206511] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27b753a-ace8-4d5f-9900-33ff6f9a3f64 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.236139] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180523MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1707.236300] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.236507] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.315217] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1707.315430] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1707.315577] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1707.315745] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3d2749f8-18d2-4631-adc2-093988125eb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1707.315877] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1035139f-2193-4d1c-a220-369ef6096ae0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1707.315993] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53048279-c3ea-484f-9a3e-049b12892f7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1707.316225] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1707.316409] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1707.316558] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1707.316719] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 02c198ea-61de-4e70-866c-39781d353d04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1707.328132] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85003695-cb3e-4bce-9a3a-a1d799b36369 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1707.328377] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1707.328550] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '76', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_e816704de44649099e96127498eed7e9': '1', 'io_workload': '10', 'num_proj_eaa1993aea5f4f039f35b32e2321e30f': '1', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_34477c98bf9c4a87971a62cb96b072c7': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_proj_88a7b37d54f047149b05fc80e0b51601': '1', 'num_proj_40bdd99e47aa40a28edd38c6b4a09a5f': '1', 'num_task_spawning': '1', 'num_proj_56bb35c701074f02820d93ad5911d089': '2', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1707.463594] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50df6f5d-6b8e-46b7-869d-0bc3996002e5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.471173] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3d9ba8-af88-49ef-bfcb-e5a5f5316577 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.502037] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a98038c-6dbe-49fd-aa37-09146b8196a5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.509474] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb2ff38-b153-4498-b983-aec40f44ee9b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.522292] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in 
ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1707.532493] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1707.546361] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1707.546542] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.310s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.547667] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.548950] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1714.155127] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1717.158430] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1718.159796] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1718.159796] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 1718.172498] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] There are 0 instances to clean {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1719.171696] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.172129] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1719.172129] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1719.195904] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1719.196131] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1719.196297] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1719.196433] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1719.196561] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1719.196683] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1719.196802] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1719.196922] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1719.197073] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1719.197292] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1719.197292] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1719.197884] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1720.180854] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.158634] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.785841] env[61962]: WARNING oslo_vmware.rw_handles [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1721.785841] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1721.785841] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1721.785841] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1721.785841] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1721.785841] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1721.785841] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1721.785841] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1721.785841] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1721.785841] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1721.785841] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1721.785841] env[61962]: ERROR oslo_vmware.rw_handles [ 1721.786675] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/17bbf8bd-158c-4f73-a42f-38f109c632ad/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1721.788387] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1721.788659] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Copying Virtual Disk [datastore2] vmware_temp/17bbf8bd-158c-4f73-a42f-38f109c632ad/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/17bbf8bd-158c-4f73-a42f-38f109c632ad/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1721.788964] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ab734ad-8bdd-4ff5-a99d-8c65069b95f6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.797917] env[61962]: DEBUG oslo_vmware.api [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Waiting for the task: (returnval){ [ 1721.797917] env[61962]: value = "task-4892073" [ 1721.797917] env[61962]: _type = "Task" [ 1721.797917] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.807055] env[61962]: DEBUG oslo_vmware.api [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Task: {'id': task-4892073, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.308458] env[61962]: DEBUG oslo_vmware.exceptions [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1722.308751] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.309340] env[61962]: ERROR nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1722.309340] env[61962]: Faults: ['InvalidArgument'] [ 1722.309340] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Traceback (most recent call last): [ 1722.309340] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1722.309340] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] yield resources [ 1722.309340] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1722.309340] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] self.driver.spawn(context, instance, image_meta, [ 1722.309340] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1722.309340] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1722.309340] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1722.309340] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] self._fetch_image_if_missing(context, vi) [ 1722.309340] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] image_cache(vi, tmp_image_ds_loc) [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] vm_util.copy_virtual_disk( [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] session._wait_for_task(vmdk_copy_task) [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 
1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] return self.wait_for_task(task_ref) [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] return evt.wait() [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] result = hub.switch() [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1722.309759] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] return self.greenlet.switch() [ 1722.310241] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1722.310241] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] self.f(*self.args, **self.kw) [ 1722.310241] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1722.310241] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] raise exceptions.translate_fault(task_info.error) [ 1722.310241] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1722.310241] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Faults: ['InvalidArgument'] [ 1722.310241] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] [ 1722.310241] env[61962]: INFO nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Terminating instance [ 1722.311263] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.311488] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1722.311728] env[61962]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-b93e61fa-94d0-468f-810d-1e78ab28b002 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.313995] env[61962]: DEBUG nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1722.314205] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1722.314928] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c54d79b-0ed5-4ff8-89f1-2d580d7fb9e0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.323200] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1722.323414] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b236230-917c-48d2-a723-363def590249 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.325664] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1722.325833] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1722.326817] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39b26c8d-e171-46e0-b42f-f8121a255701 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.332092] env[61962]: DEBUG oslo_vmware.api [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Waiting for the task: (returnval){ [ 1722.332092] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52debbf2-05e7-8f67-b385-68806d570df4" [ 1722.332092] env[61962]: _type = "Task" [ 1722.332092] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.341940] env[61962]: DEBUG oslo_vmware.api [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52debbf2-05e7-8f67-b385-68806d570df4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.394221] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1722.394464] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1722.394646] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Deleting the datastore file [datastore2] 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1722.394931] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2dc8785-54d4-4af3-83dd-47f46af3c456 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.402490] env[61962]: DEBUG oslo_vmware.api [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Waiting for the task: (returnval){ [ 1722.402490] env[61962]: value = "task-4892075" [ 1722.402490] env[61962]: _type = "Task" [ 1722.402490] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.411137] env[61962]: DEBUG oslo_vmware.api [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Task: {'id': task-4892075, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.842293] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1722.842560] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Creating directory with path [datastore2] vmware_temp/f7720ed6-8663-4e6e-864d-d02c2f6afa0c/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1722.842797] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-908f48e7-fcfa-428e-be64-34f76089a623 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.855117] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Created directory with path [datastore2] vmware_temp/f7720ed6-8663-4e6e-864d-d02c2f6afa0c/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1722.855347] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Fetch image to [datastore2] vmware_temp/f7720ed6-8663-4e6e-864d-d02c2f6afa0c/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1722.855525] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/f7720ed6-8663-4e6e-864d-d02c2f6afa0c/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1722.856358] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ca1342-b77b-40ad-83ea-ef03689585ec {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.863686] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab03ae7d-9154-452c-8035-d5dbbae35667 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.875557] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21374f2d-f8bb-4ad2-97f6-745822cf83bd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.927476] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-55c0c96e-180a-415b-9f99-1739150586ff {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.937799] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d0277ffa-32be-41ff-968e-66474b99e73f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.939092] env[61962]: DEBUG oslo_vmware.api [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Task: {'id': task-4892075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10764} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.939345] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1722.939523] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1722.939696] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1722.939863] env[61962]: INFO nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Took 0.63 seconds to destroy the instance on the hypervisor. 
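[editor's note] The task-4892073/task-4892075 exchanges above all follow the same oslo.vmware shape: submit a vSphere task, then poll its task_info until it reaches 'success' or 'error' (the "progress is 0%" and "completed successfully ... 'duration_secs': 0.10764" records). A minimal sketch of that polling loop, in Python, where get_task_info is a hypothetical accessor standing in for the real property-collector read of the Task managed object:

    import time

    class TaskFailed(Exception):
        """Raised when the vSphere task ends in the 'error' state."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll until the task leaves the queued/running states, mirroring
        # the "progress is 0%" -> "completed successfully" records above.
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info            # carries e.g. duration_secs in the log
            if info['state'] == 'error':
                # oslo.vmware translates the fault at this point; compare the
                # earlier "Fault InvalidArgument not matched" record.
                raise TaskFailed(info['error'])
            time.sleep(poll_interval)  # still 'queued' or 'running'

In the real driver this loop runs inside a looping call on the session's task_poll_interval rather than a bare sleep, but the success/error/keep-polling branching is the same.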
[ 1722.942161] env[61962]: DEBUG nova.compute.claims [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1722.942326] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.942541] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.964250] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1723.043051] env[61962]: DEBUG oslo_vmware.rw_handles [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f7720ed6-8663-4e6e-864d-d02c2f6afa0c/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1723.109505] env[61962]: DEBUG oslo_vmware.rw_handles [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1723.109505] env[61962]: DEBUG oslo_vmware.rw_handles [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f7720ed6-8663-4e6e-864d-d02c2f6afa0c/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1723.158118] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.217229] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5db7565-329e-4087-8ef2-cab7d09ec635 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.227067] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609d03d2-4074-4411-92b7-d794829f2f07 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.257568] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf703ed0-4741-48f9-b2f8-f23a7b8eedca {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.265650] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89c3e57-c1e6-4e2d-824d-c3742525454d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.280018] env[61962]: DEBUG nova.compute.provider_tree [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1723.290248] env[61962]: DEBUG nova.scheduler.client.report [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1723.304416] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.362s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.304955] env[61962]: ERROR nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not 
correct: fileType [ 1723.304955] env[61962]: Faults: ['InvalidArgument'] [ 1723.304955] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Traceback (most recent call last): [ 1723.304955] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1723.304955] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] self.driver.spawn(context, instance, image_meta, [ 1723.304955] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1723.304955] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1723.304955] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1723.304955] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] self._fetch_image_if_missing(context, vi) [ 1723.304955] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1723.304955] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] image_cache(vi, tmp_image_ds_loc) [ 1723.304955] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] vm_util.copy_virtual_disk( [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] session._wait_for_task(vmdk_copy_task) [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] return self.wait_for_task(task_ref) [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] return evt.wait() [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] result = hub.switch() [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] return self.greenlet.switch() [ 1723.305385] env[61962]: ERROR 
nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1723.305385] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] self.f(*self.args, **self.kw) [ 1723.306186] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1723.306186] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] raise exceptions.translate_fault(task_info.error) [ 1723.306186] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1723.306186] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Faults: ['InvalidArgument'] [ 1723.306186] env[61962]: ERROR nova.compute.manager [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] [ 1723.306186] env[61962]: DEBUG nova.compute.utils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1723.307380] env[61962]: DEBUG nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Build of instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 was re-scheduled: A specified parameter was not correct: fileType [ 1723.307380] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1723.307782] env[61962]: DEBUG nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1723.307958] env[61962]: DEBUG nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1723.308158] env[61962]: DEBUG nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1723.308360] env[61962]: DEBUG nova.network.neutron [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1723.760579] env[61962]: DEBUG nova.network.neutron [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.771301] env[61962]: INFO nova.compute.manager [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Took 0.46 seconds to deallocate network for instance. [ 1723.905991] env[61962]: INFO nova.scheduler.client.report [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Deleted allocations for instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 [ 1723.936979] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e958cfa4-b00d-4980-bc5a-c65b69f7ca2e tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 614.533s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.938239] env[61962]: DEBUG oslo_concurrency.lockutils [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 418.905s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.938465] env[61962]: DEBUG oslo_concurrency.lockutils [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Acquiring lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.939962] env[61962]: DEBUG oslo_concurrency.lockutils [None
req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.940266] env[61962]: DEBUG oslo_concurrency.lockutils [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.943906] env[61962]: INFO nova.compute.manager [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Terminating instance [ 1723.946606] env[61962]: DEBUG nova.compute.manager [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1723.947017] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1723.947421] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b5280bf4-6f9a-4f1a-b592-b416e69d427d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.959422] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab00da8d-307d-4cb1-aff7-7f9aac0c7e89 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.974894] env[61962]: DEBUG nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Starting instance... {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1724.007770] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7 could not be found.
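[editor's note] The WARNING just above is the benign race between the failed spawn's cleanup and the user-requested delete: the VM was already unregistered, so the driver treats InstanceNotFound from the backend as an already-completed destroy instead of failing the terminate. A sketch of that idempotent-destroy pattern in Python; vm_backend and its lookup/unregister/delete_files methods are illustrative stand-ins, not the actual vmops API:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(vm_backend, instance_uuid, log):
        # Destroy must be idempotent: if the hypervisor no longer knows the
        # VM (here because the failed spawn's cleanup already unregistered
        # it), log a warning and carry on so allocations, events and network
        # can still be released by the caller.
        try:
            vm_ref = vm_backend.lookup(instance_uuid)
            vm_backend.unregister(vm_ref)
            vm_backend.delete_files(vm_ref)
        except InstanceNotFound:
            log('Instance does not exist on backend: %s' % instance_uuid)
        # Either way the instance is gone from the hypervisor's point of view.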
[ 1724.008039] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1724.008236] env[61962]: INFO nova.compute.manager [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1724.009147] env[61962]: DEBUG oslo.service.loopingcall [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1724.013024] env[61962]: DEBUG nova.compute.manager [-] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1724.013024] env[61962]: DEBUG nova.network.neutron [-] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1724.042658] env[61962]: DEBUG nova.network.neutron [-] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.059665] env[61962]: INFO nova.compute.manager [-] [instance: 1f7429fe-e6f3-4b0a-bae8-38ead4607ae7] Took 0.05 seconds to deallocate network for instance. 
[ 1724.074681] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.074977] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.076723] env[61962]: INFO nova.compute.claims [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1724.171066] env[61962]: DEBUG oslo_concurrency.lockutils [None req-81384ae4-31b1-40ea-af83-ed0d8b62047c tempest-FloatingIPsAssociationNegativeTestJSON-1886607009 tempest-FloatingIPsAssociationNegativeTestJSON-1886607009-project-member] Lock "1f7429fe-e6f3-4b0a-bae8-38ead4607ae7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.232s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.291487] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2fcf2f-7a34-4494-8d71-b42d66c64a4d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.300820] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d7e5aa-7de2-43f6-9454-c9d5a4bbd4b5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.335195] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c11e4a-5fd7-4822-9abd-6c5801077094 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.343564] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6e3fff-693f-41ea-a4e8-5dc39d2a5891 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.358280] env[61962]: DEBUG nova.compute.provider_tree [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1724.368842] env[61962]: DEBUG nova.scheduler.client.report [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1724.385924] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.311s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.386463] env[61962]: DEBUG nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1724.425159] env[61962]: DEBUG nova.compute.utils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1724.427837] env[61962]: DEBUG nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1724.427837] env[61962]: DEBUG nova.network.neutron [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1724.440629] env[61962]: DEBUG nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1724.490058] env[61962]: DEBUG nova.policy [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84e0d67cbeef490f920225baaa7ef0bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e58b68272c404983993a09ad12e8c03b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1724.515133] env[61962]: DEBUG nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1724.541941] env[61962]: DEBUG nova.virt.hardware [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1724.542215] env[61962]: DEBUG nova.virt.hardware [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1724.542375] env[61962]: DEBUG nova.virt.hardware [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1724.542557] env[61962]: DEBUG nova.virt.hardware [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1724.542704] env[61962]: DEBUG nova.virt.hardware [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1724.542853] env[61962]: DEBUG nova.virt.hardware [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1724.543081] env[61962]: DEBUG nova.virt.hardware [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1724.543241] env[61962]: DEBUG nova.virt.hardware [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1724.543414] env[61962]: DEBUG nova.virt.hardware [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Got 1 
possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1724.543586] env[61962]: DEBUG nova.virt.hardware [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1724.543756] env[61962]: DEBUG nova.virt.hardware [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1724.544662] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3da4b9c-185b-4d74-90bc-c7f3dd873066 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.553958] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edec47f-0673-45f9-a2cd-96972e642cf8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.921929] env[61962]: DEBUG nova.network.neutron [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Successfully created port: e3bbde0c-96f2-479e-b94a-1ee55df05fb0 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1725.159040] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1725.838481] env[61962]: DEBUG nova.network.neutron [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Successfully updated port: e3bbde0c-96f2-479e-b94a-1ee55df05fb0 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1725.850131] env[61962]: DEBUG nova.compute.manager [req-1b7cfc4b-fbc5-4499-8a79-bbaacf1d5510 req-6c390b33-646e-4581-895d-974ffb4c6c80 service nova] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Received event network-vif-plugged-e3bbde0c-96f2-479e-b94a-1ee55df05fb0 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1725.850383] env[61962]: DEBUG oslo_concurrency.lockutils [req-1b7cfc4b-fbc5-4499-8a79-bbaacf1d5510 req-6c390b33-646e-4581-895d-974ffb4c6c80 service nova] Acquiring lock "85003695-cb3e-4bce-9a3a-a1d799b36369-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.850582] env[61962]: DEBUG oslo_concurrency.lockutils [req-1b7cfc4b-fbc5-4499-8a79-bbaacf1d5510 req-6c390b33-646e-4581-895d-974ffb4c6c80 service nova] Lock "85003695-cb3e-4bce-9a3a-a1d799b36369-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.850738] 
env[61962]: DEBUG oslo_concurrency.lockutils [req-1b7cfc4b-fbc5-4499-8a79-bbaacf1d5510 req-6c390b33-646e-4581-895d-974ffb4c6c80 service nova] Lock "85003695-cb3e-4bce-9a3a-a1d799b36369-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.850964] env[61962]: DEBUG nova.compute.manager [req-1b7cfc4b-fbc5-4499-8a79-bbaacf1d5510 req-6c390b33-646e-4581-895d-974ffb4c6c80 service nova] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] No waiting events found dispatching network-vif-plugged-e3bbde0c-96f2-479e-b94a-1ee55df05fb0 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1725.851292] env[61962]: WARNING nova.compute.manager [req-1b7cfc4b-fbc5-4499-8a79-bbaacf1d5510 req-6c390b33-646e-4581-895d-974ffb4c6c80 service nova] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Received unexpected event network-vif-plugged-e3bbde0c-96f2-479e-b94a-1ee55df05fb0 for instance with vm_state building and task_state spawning. [ 1725.852396] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "refresh_cache-85003695-cb3e-4bce-9a3a-a1d799b36369" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.852565] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "refresh_cache-85003695-cb3e-4bce-9a3a-a1d799b36369" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.852732] env[61962]: DEBUG nova.network.neutron [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1725.897029] env[61962]: DEBUG nova.network.neutron [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1726.158589] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1726.158974] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances with incomplete migration {{(pid=61962) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1726.167807] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1726.182148] env[61962]: DEBUG nova.network.neutron [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Updating instance_info_cache with network_info: [{"id": "e3bbde0c-96f2-479e-b94a-1ee55df05fb0", "address": "fa:16:3e:4f:96:d8", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3bbde0c-96", "ovs_interfaceid": "e3bbde0c-96f2-479e-b94a-1ee55df05fb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.196821] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "refresh_cache-85003695-cb3e-4bce-9a3a-a1d799b36369" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.196821] env[61962]: DEBUG nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Instance network_info: |[{"id": "e3bbde0c-96f2-479e-b94a-1ee55df05fb0", "address": "fa:16:3e:4f:96:d8", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3bbde0c-96", "ovs_interfaceid": "e3bbde0c-96f2-479e-b94a-1ee55df05fb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1726.197490] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:96:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be8bd197-4b2b-46e7-88ea-2554b0438584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3bbde0c-96f2-479e-b94a-1ee55df05fb0', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1726.205820] env[61962]: DEBUG oslo.service.loopingcall [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1726.206016] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1726.206271] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63337d83-120c-4936-b691-cb74b8dff894 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.229189] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1726.229189] env[61962]: value = "task-4892076" [ 1726.229189] env[61962]: _type = "Task" [ 1726.229189] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.237951] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892076, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.741127] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892076, 'name': CreateVM_Task, 'duration_secs': 0.331738} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.741331] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1726.742026] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.742198] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.742541] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1726.742766] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b61841a2-318d-43f5-b2bf-156a38f6cd4a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.747988] env[61962]: DEBUG oslo_vmware.api [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 1726.747988] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5288882f-178a-318f-ba1c-3303b7f88144" [ 1726.747988] env[61962]: _type = "Task" [ 1726.747988] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.756318] env[61962]: DEBUG oslo_vmware.api [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5288882f-178a-318f-ba1c-3303b7f88144, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.258371] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.258746] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1727.258823] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.875851] env[61962]: DEBUG nova.compute.manager [req-42537ec2-28d0-4b50-9e90-a4f2ef18c4e9 req-8e4eab87-06b6-40df-99b3-aa4eb30355c6 service nova] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Received event network-changed-e3bbde0c-96f2-479e-b94a-1ee55df05fb0 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1727.876012] env[61962]: DEBUG nova.compute.manager [req-42537ec2-28d0-4b50-9e90-a4f2ef18c4e9 req-8e4eab87-06b6-40df-99b3-aa4eb30355c6 service nova] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Refreshing instance network info cache due to event network-changed-e3bbde0c-96f2-479e-b94a-1ee55df05fb0. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1727.876289] env[61962]: DEBUG oslo_concurrency.lockutils [req-42537ec2-28d0-4b50-9e90-a4f2ef18c4e9 req-8e4eab87-06b6-40df-99b3-aa4eb30355c6 service nova] Acquiring lock "refresh_cache-85003695-cb3e-4bce-9a3a-a1d799b36369" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.876442] env[61962]: DEBUG oslo_concurrency.lockutils [req-42537ec2-28d0-4b50-9e90-a4f2ef18c4e9 req-8e4eab87-06b6-40df-99b3-aa4eb30355c6 service nova] Acquired lock "refresh_cache-85003695-cb3e-4bce-9a3a-a1d799b36369" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.876632] env[61962]: DEBUG nova.network.neutron [req-42537ec2-28d0-4b50-9e90-a4f2ef18c4e9 req-8e4eab87-06b6-40df-99b3-aa4eb30355c6 service nova] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Refreshing network info cache for port e3bbde0c-96f2-479e-b94a-1ee55df05fb0 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1728.141461] env[61962]: DEBUG nova.network.neutron [req-42537ec2-28d0-4b50-9e90-a4f2ef18c4e9 req-8e4eab87-06b6-40df-99b3-aa4eb30355c6 service nova] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Updated VIF entry in instance network info cache for port e3bbde0c-96f2-479e-b94a-1ee55df05fb0. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1728.141839] env[61962]: DEBUG nova.network.neutron [req-42537ec2-28d0-4b50-9e90-a4f2ef18c4e9 req-8e4eab87-06b6-40df-99b3-aa4eb30355c6 service nova] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Updating instance_info_cache with network_info: [{"id": "e3bbde0c-96f2-479e-b94a-1ee55df05fb0", "address": "fa:16:3e:4f:96:d8", "network": {"id": "4128bf2d-763a-4743-9aa9-47746ebfaa93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1442080107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e58b68272c404983993a09ad12e8c03b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3bbde0c-96", "ovs_interfaceid": "e3bbde0c-96f2-479e-b94a-1ee55df05fb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.151750] env[61962]: DEBUG oslo_concurrency.lockutils [req-42537ec2-28d0-4b50-9e90-a4f2ef18c4e9 req-8e4eab87-06b6-40df-99b3-aa4eb30355c6 service nova] Releasing lock "refresh_cache-85003695-cb3e-4bce-9a3a-a1d799b36369" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.473308] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.497322] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Getting list of instances from cluster (obj){ [ 1737.497322] env[61962]: value = "domain-c8" [ 1737.497322] env[61962]: _type = "ClusterComputeResource" [ 1737.497322] env[61962]: } {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1737.498601] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb43ac54-a56d-4cad-8c6c-34ca9adf6a2a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.515765] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Got total of 10 instances {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1737.515936] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid c3c36949-754e-4bab-860d-74e0cde4dfd6 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1737.516130] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 6f0c884b-afb3-4d06-9dd2-f07099c364e0 {{(pid=61962) 
_sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1737.516363] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 3d2749f8-18d2-4631-adc2-093988125eb4 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1737.516550] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 1035139f-2193-4d1c-a220-369ef6096ae0 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1737.516708] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 53048279-c3ea-484f-9a3e-049b12892f7a {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1737.516865] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1737.517030] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 1e71cc99-f026-4197-b7f1-ae18f7f99af6 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1737.517193] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1737.517345] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 02c198ea-61de-4e70-866c-39781d353d04 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1737.517532] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 85003695-cb3e-4bce-9a3a-a1d799b36369 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1737.517811] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "c3c36949-754e-4bab-860d-74e0cde4dfd6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.518053] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.518259] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "3d2749f8-18d2-4631-adc2-093988125eb4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.518456] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "1035139f-2193-4d1c-a220-369ef6096ae0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.518646] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "53048279-c3ea-484f-9a3e-049b12892f7a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.518836] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.519038] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.519237] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.519426] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "02c198ea-61de-4e70-866c-39781d353d04" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.519613] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "85003695-cb3e-4bce-9a3a-a1d799b36369" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.159247] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.171931] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.172186] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.172357] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.172516] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1768.173737] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85f52ad-2811-4421-9768-f74d7f3b19b5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.183282] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871ffab9-1aa5-4f8e-a17d-e3c532d7dd1b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.198470] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23bf21c-e183-44e1-8549-04e4464e4c75 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.205834] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464a3236-2147-4e38-aa79-348ad64f5ee7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.235324] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180552MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1768.235496] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.235689] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.375782] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1768.375939] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1768.376076] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3d2749f8-18d2-4631-adc2-093988125eb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1768.376203] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1035139f-2193-4d1c-a220-369ef6096ae0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1768.376324] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53048279-c3ea-484f-9a3e-049b12892f7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1768.376467] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1768.376586] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1768.376705] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1768.376821] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 02c198ea-61de-4e70-866c-39781d353d04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1768.376935] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85003695-cb3e-4bce-9a3a-a1d799b36369 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1768.377148] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1768.377309] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '77', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_eaa1993aea5f4f039f35b32e2321e30f': '1', 'io_workload': '10', 'num_proj_a31ea93dd586435ebb82c1dbfd14c2ad': '1', 'num_proj_34477c98bf9c4a87971a62cb96b072c7': '1', 'num_proj_e58b68272c404983993a09ad12e8c03b': '2', 'num_proj_88a7b37d54f047149b05fc80e0b51601': '1', 'num_proj_40bdd99e47aa40a28edd38c6b4a09a5f': '1', 'num_task_spawning': '2', 'num_proj_56bb35c701074f02820d93ad5911d089': '2', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1768.393985] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Refreshing inventories for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1768.409181] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Updating ProviderTree inventory for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1768.409402] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Updating inventory in ProviderTree for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1768.421241] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Refreshing aggregate associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, aggregates: None {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1768.442234] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Refreshing trait associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, traits: 
COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1768.569443] env[61962]: WARNING oslo_vmware.rw_handles [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1768.569443] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1768.569443] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1768.569443] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1768.569443] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1768.569443] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1768.569443] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1768.569443] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1768.569443] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1768.569443] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1768.569443] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1768.569443] env[61962]: ERROR oslo_vmware.rw_handles [ 1768.569443] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/f7720ed6-8663-4e6e-864d-d02c2f6afa0c/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1768.571890] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1768.572179] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Copying Virtual Disk [datastore2] vmware_temp/f7720ed6-8663-4e6e-864d-d02c2f6afa0c/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/f7720ed6-8663-4e6e-864d-d02c2f6afa0c/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1768.572479] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c033a5f0-0b75-4feb-b440-b6d5d9357c23 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.576460] env[61962]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ccea1df-2bfa-463e-84c4-af98b74eb089 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.585847] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c674dd99-0dc2-4fdb-baa7-6803b8adee7e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.589105] env[61962]: DEBUG oslo_vmware.api [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Waiting for the task: (returnval){ [ 1768.589105] env[61962]: value = "task-4892077" [ 1768.589105] env[61962]: _type = "Task" [ 1768.589105] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.619963] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715c23f4-4964-44ca-8dd7-df7e4fe5416a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.625744] env[61962]: DEBUG oslo_vmware.api [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Task: {'id': task-4892077, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.630966] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100c912c-38b5-4c1c-8a0d-72f685e80cae {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.645293] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1768.655965] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1768.671302] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1768.671438] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.436s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.100091] env[61962]: DEBUG oslo_vmware.exceptions [None 
req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1769.100403] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1769.101089] env[61962]: ERROR nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1769.101089] env[61962]: Faults: ['InvalidArgument'] [ 1769.101089] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Traceback (most recent call last): [ 1769.101089] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1769.101089] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] yield resources [ 1769.101089] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1769.101089] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] self.driver.spawn(context, instance, image_meta, [ 1769.101089] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1769.101089] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1769.101089] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1769.101089] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] self._fetch_image_if_missing(context, vi) [ 1769.101089] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] image_cache(vi, tmp_image_ds_loc) [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] vm_util.copy_virtual_disk( [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] 
session._wait_for_task(vmdk_copy_task) [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] return self.wait_for_task(task_ref) [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] return evt.wait() [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] result = hub.switch() [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1769.101482] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] return self.greenlet.switch() [ 1769.101848] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1769.101848] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] self.f(*self.args, **self.kw) [ 1769.101848] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1769.101848] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] raise exceptions.translate_fault(task_info.error) [ 1769.101848] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1769.101848] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Faults: ['InvalidArgument'] [ 1769.101848] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] [ 1769.101848] env[61962]: INFO nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Terminating instance [ 1769.102969] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.103180] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
1769.103426] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d49e2914-8328-440e-b50f-ec54108e5b04 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.105681] env[61962]: DEBUG nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1769.105881] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1769.106621] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a15c10-423e-4234-846b-f6f686a3b7fd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.113882] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1769.114130] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9c00c55-3441-4ad0-8c81-e37ff77a82f3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.116400] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1769.116570] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1769.117507] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25ba0991-e65a-4ab3-9fb8-19f99193a5e6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.122664] env[61962]: DEBUG oslo_vmware.api [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for the task: (returnval){ [ 1769.122664] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]527777f9-791e-bbeb-d033-9d177fe97a8f" [ 1769.122664] env[61962]: _type = "Task" [ 1769.122664] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.131968] env[61962]: DEBUG oslo_vmware.api [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]527777f9-791e-bbeb-d033-9d177fe97a8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.186515] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1769.186908] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1769.186956] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Deleting the datastore file [datastore2] c3c36949-754e-4bab-860d-74e0cde4dfd6 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1769.187618] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ce2791b-7400-47fe-9e1c-d5bf3852f4be {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.194499] env[61962]: DEBUG oslo_vmware.api [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Waiting for the task: (returnval){ [ 1769.194499] env[61962]: value = "task-4892079" [ 1769.194499] env[61962]: _type = "Task" [ 1769.194499] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.202773] env[61962]: DEBUG oslo_vmware.api [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Task: {'id': task-4892079, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.633425] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1769.633684] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Creating directory with path [datastore2] vmware_temp/9a6056c5-de1b-45df-a36a-5e28ffa08d32/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1769.633922] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a75d9a1b-9d3d-4557-937c-198902cce3e4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.649859] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Created directory with path [datastore2] vmware_temp/9a6056c5-de1b-45df-a36a-5e28ffa08d32/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1769.650083] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Fetch image to [datastore2] vmware_temp/9a6056c5-de1b-45df-a36a-5e28ffa08d32/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1769.650283] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/9a6056c5-de1b-45df-a36a-5e28ffa08d32/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1769.650988] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d7a90a-59c2-4209-9313-cb38a56f415d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.658193] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac5dd76-eb84-4d62-8539-4244342888fc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.667924] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440d23a1-e999-475f-93e3-9e8d048888e4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.700925] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4090f117-a0b4-4909-abb4-ed643f940d4b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.708729] env[61962]: DEBUG oslo_vmware.api [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Task: {'id': task-4892079, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10167} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.710212] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1769.710405] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1769.710578] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1769.710752] env[61962]: INFO nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Took 0.60 seconds to destroy the instance on the hypervisor. 
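
The failed-spawn sequence that ends above follows one pattern throughout this log: the image upload's HTTP write handle logs RemoteDisconnected on close, the follow-up CopyVirtualDisk_Task then fails with InvalidArgument on fileType, and the instance is unregistered and its datastore files deleted. A minimal stdlib-only sketch of why close() trips here (illustrative; close_write_handle is an assumed name, not the oslo_vmware.rw_handles API): the response to the streamed upload is only read at close time, by which point the ESX host has already dropped the connection.

    import http.client

    def close_write_handle(conn: http.client.HTTPSConnection) -> None:
        # Mirrors the close() pattern at oslo_vmware/rw_handles.py:283 in the
        # tracebacks above: the file body was streamed earlier over the open
        # connection, and the server's response is only collected here.
        try:
            resp = conn.getresponse()
            resp.read()
        except http.client.RemoteDisconnected:
            # The host closed the socket without sending a status line. The
            # upload may still have landed, which is consistent with the log:
            # this is only a WARNING, and Nova proceeds to the
            # CopyVirtualDisk_Task step immediately afterwards.
            pass
        finally:
            conn.close()
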
[ 1769.712755] env[61962]: DEBUG nova.compute.claims [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1769.712920] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.713141] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.715621] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-aea0f86d-173a-40a5-bea9-751c0882238c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.742383] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1769.808163] env[61962]: DEBUG oslo_vmware.rw_handles [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9a6056c5-de1b-45df-a36a-5e28ffa08d32/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1769.869034] env[61962]: DEBUG oslo_vmware.rw_handles [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1769.869227] env[61962]: DEBUG oslo_vmware.rw_handles [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9a6056c5-de1b-45df-a36a-5e28ffa08d32/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1769.965021] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7358c1-da41-45a1-bf69-49529b9bff72 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.973306] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20a86cc-fe20-4862-b1ad-7a89ca42daa0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.004755] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5994aa09-2ceb-4412-a522-a8ec07528513 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.012834] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760e788b-0677-40ee-8d1b-520ec39fbfb6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.026956] env[61962]: DEBUG nova.compute.provider_tree [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1770.035509] env[61962]: DEBUG nova.scheduler.client.report [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1770.049900] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.337s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.050447] env[61962]: ERROR nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1770.050447] env[61962]: Faults: ['InvalidArgument'] [ 1770.050447] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Traceback (most recent call last): [ 1770.050447] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1770.050447] env[61962]: 
ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] self.driver.spawn(context, instance, image_meta, [ 1770.050447] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1770.050447] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1770.050447] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1770.050447] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] self._fetch_image_if_missing(context, vi) [ 1770.050447] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1770.050447] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] image_cache(vi, tmp_image_ds_loc) [ 1770.050447] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] vm_util.copy_virtual_disk( [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] session._wait_for_task(vmdk_copy_task) [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] return self.wait_for_task(task_ref) [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] return evt.wait() [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] result = hub.switch() [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] return self.greenlet.switch() [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1770.050810] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] self.f(*self.args, **self.kw) [ 1770.051206] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1770.051206] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] raise exceptions.translate_fault(task_info.error) [ 1770.051206] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1770.051206] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Faults: ['InvalidArgument'] [ 1770.051206] env[61962]: ERROR nova.compute.manager [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] [ 1770.051206] env[61962]: DEBUG nova.compute.utils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1770.052861] env[61962]: DEBUG nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Build of instance c3c36949-754e-4bab-860d-74e0cde4dfd6 was re-scheduled: A specified parameter was not correct: fileType [ 1770.052861] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1770.053252] env[61962]: DEBUG nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1770.053435] env[61962]: DEBUG nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1770.053616] env[61962]: DEBUG nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1770.053778] env[61962]: DEBUG nova.network.neutron [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1770.488348] env[61962]: DEBUG nova.network.neutron [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1770.502211] env[61962]: INFO nova.compute.manager [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Took 0.45 seconds to deallocate network for instance. [ 1770.608233] env[61962]: INFO nova.scheduler.client.report [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Deleted allocations for instance c3c36949-754e-4bab-860d-74e0cde4dfd6 [ 1770.629450] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6a898c7a-d8c8-4cee-8be6-7d5d37c58c60 tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Lock "c3c36949-754e-4bab-860d-74e0cde4dfd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 611.383s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.629908] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Lock "c3c36949-754e-4bab-860d-74e0cde4dfd6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 415.923s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.630155] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Acquiring lock "c3c36949-754e-4bab-860d-74e0cde4dfd6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.630368] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Lock "c3c36949-754e-4bab-860d-74e0cde4dfd6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.630532] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Lock "c3c36949-754e-4bab-860d-74e0cde4dfd6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.632983] env[61962]: INFO nova.compute.manager [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Terminating instance [ 1770.635058] env[61962]: DEBUG nova.compute.manager [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1770.635602] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1770.635602] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4912af17-2935-4968-859f-d088c5369741 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.645635] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233a73c7-93e6-4317-97d0-7db0936b840e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.676916] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c3c36949-754e-4bab-860d-74e0cde4dfd6 could not be found. [ 1770.677147] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1770.677332] env[61962]: INFO nova.compute.manager [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1770.677580] env[61962]: DEBUG oslo.service.loopingcall [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1770.677802] env[61962]: DEBUG nova.compute.manager [-] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1770.677895] env[61962]: DEBUG nova.network.neutron [-] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1770.703970] env[61962]: DEBUG nova.network.neutron [-] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1770.713993] env[61962]: INFO nova.compute.manager [-] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] Took 0.04 seconds to deallocate network for instance. [ 1770.814368] env[61962]: DEBUG oslo_concurrency.lockutils [None req-f83271b1-1065-4bfa-a828-82740b56297b tempest-ServerPasswordTestJSON-1801851891 tempest-ServerPasswordTestJSON-1801851891-project-member] Lock "c3c36949-754e-4bab-860d-74e0cde4dfd6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.184s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.815652] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "c3c36949-754e-4bab-860d-74e0cde4dfd6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 33.298s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.815947] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c3c36949-754e-4bab-860d-74e0cde4dfd6] During sync_power_state the instance has a pending task (deleting). Skip. [ 1770.816165] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "c3c36949-754e-4bab-860d-74e0cde4dfd6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.672101] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.672525] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1775.154665] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.157897] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1780.159465] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1781.159518] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1781.159851] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1781.159851] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1781.182997] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1781.183196] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1781.183492] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1781.183644] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1781.183773] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1781.183896] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1781.184025] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1781.184147] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1781.184287] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1781.184431] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1782.159029] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1785.159720] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1785.159720] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1816.903560] env[61962]: WARNING oslo_vmware.rw_handles [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1816.903560] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1816.903560] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1816.903560] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1816.903560] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1816.903560] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1816.903560] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1816.903560] env[61962]: ERROR oslo_vmware.rw_handles version, status, 
reason = self._read_status() [ 1816.903560] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1816.903560] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1816.903560] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1816.903560] env[61962]: ERROR oslo_vmware.rw_handles [ 1816.904252] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/9a6056c5-de1b-45df-a36a-5e28ffa08d32/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1816.905980] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1816.906236] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Copying Virtual Disk [datastore2] vmware_temp/9a6056c5-de1b-45df-a36a-5e28ffa08d32/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/9a6056c5-de1b-45df-a36a-5e28ffa08d32/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1816.906532] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fcb0fd2d-64cf-436b-9b34-303fd20f26eb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.916809] env[61962]: DEBUG oslo_vmware.api [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for the task: (returnval){ [ 1816.916809] env[61962]: value = "task-4892080" [ 1816.916809] env[61962]: _type = "Task" [ 1816.916809] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.925608] env[61962]: DEBUG oslo_vmware.api [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Task: {'id': task-4892080, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.427310] env[61962]: DEBUG oslo_vmware.exceptions [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1817.427600] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.428173] env[61962]: ERROR nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1817.428173] env[61962]: Faults: ['InvalidArgument'] [ 1817.428173] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Traceback (most recent call last): [ 1817.428173] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1817.428173] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] yield resources [ 1817.428173] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1817.428173] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] self.driver.spawn(context, instance, image_meta, [ 1817.428173] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1817.428173] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1817.428173] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1817.428173] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] self._fetch_image_if_missing(context, vi) [ 1817.428173] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] image_cache(vi, tmp_image_ds_loc) [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] vm_util.copy_virtual_disk( [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] session._wait_for_task(vmdk_copy_task) [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] return self.wait_for_task(task_ref) [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] return evt.wait() [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] result = hub.switch() [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1817.428644] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] return self.greenlet.switch() [ 1817.429144] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1817.429144] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] self.f(*self.args, **self.kw) [ 1817.429144] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1817.429144] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] raise exceptions.translate_fault(task_info.error) [ 1817.429144] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1817.429144] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Faults: ['InvalidArgument'] [ 1817.429144] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] [ 1817.429144] env[61962]: INFO nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Terminating instance [ 1817.430068] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.430281] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1817.430518] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-869315c8-a380-429f-8ce0-becc408f568b 
{{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.432892] env[61962]: DEBUG nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1817.433096] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1817.433792] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96f832c-2a43-4e23-8507-d9f03fa0760c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.440861] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1817.441059] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2104290b-fd77-4ffc-93b8-0a252bd758ed {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.443228] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1817.443401] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1817.444342] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41d67b4c-808b-4607-967e-95c4f3607cb7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.449302] env[61962]: DEBUG oslo_vmware.api [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Waiting for the task: (returnval){ [ 1817.449302] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52067f13-c40a-eb53-926f-568d929b762d" [ 1817.449302] env[61962]: _type = "Task" [ 1817.449302] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.456716] env[61962]: DEBUG oslo_vmware.api [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52067f13-c40a-eb53-926f-568d929b762d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.509993] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1817.510212] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1817.510394] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Deleting the datastore file [datastore2] 6f0c884b-afb3-4d06-9dd2-f07099c364e0 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1817.510667] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d668df6-fbaa-41d5-a03c-f3f27886def9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.518661] env[61962]: DEBUG oslo_vmware.api [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for the task: (returnval){ [ 1817.518661] env[61962]: value = "task-4892082" [ 1817.518661] env[61962]: _type = "Task" [ 1817.518661] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.526762] env[61962]: DEBUG oslo_vmware.api [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Task: {'id': task-4892082, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.960018] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1817.960351] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Creating directory with path [datastore2] vmware_temp/c8ff9423-1d4e-4688-b438-d2a776aed13f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1817.960532] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f8021df-e4be-40db-af51-b065849b58e7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.974399] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Created directory with path [datastore2] vmware_temp/c8ff9423-1d4e-4688-b438-d2a776aed13f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1817.974604] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Fetch image to [datastore2] vmware_temp/c8ff9423-1d4e-4688-b438-d2a776aed13f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1817.974772] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/c8ff9423-1d4e-4688-b438-d2a776aed13f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1817.975573] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f628f948-cd43-40f7-8c52-79b2b8813009 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.983160] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f05802e-7acc-4c86-a534-04d225110485 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.993823] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace423e3-61fc-4fc5-b005-01ff217d0485 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.028210] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-610ecc59-a7f6-4f72-99da-e55433e39899 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.036035] env[61962]: DEBUG oslo_vmware.api [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Task: {'id': task-4892082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080887} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.037703] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1818.037900] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1818.038088] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1818.038277] env[61962]: INFO nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Took 0.61 seconds to destroy the instance on the hypervisor. 
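
The traceback at [1817.428173] pins down the failure mode that recurs throughout this log: _fetch_image_if_missing() calls _cache_sparse_image(), which starts a vCenter CopyVirtualDisk_Task via vm_util.copy_virtual_disk() and then blocks in oslo.vmware's task-polling loop; vCenter rejects the copy with an InvalidArgument fault on 'fileType', and translate_fault() re-raises it as the VimFaultException seen above. A minimal sketch of that polling pattern, with a plain sleep loop standing in for the library's eventlet-based loopingcall (helper names simplified, not the exact oslo.vmware internals):

    import time

    from oslo_vmware import exceptions, vim_util


    def wait_for_task(session, task_ref, interval=0.5):
        """Poll a vCenter task object until it reaches a terminal state."""
        while True:
            task_info = session.invoke_api(vim_util, 'get_object_property',
                                           session.vim, task_ref, 'info')
            if task_info.state == 'success':
                return task_info
            if task_info.state == 'error':
                # For the CopyVirtualDisk_Task above, task_info.error holds
                # the InvalidArgument fault on 'fileType'; translate_fault()
                # maps it to the VimFaultException in the traceback.
                raise exceptions.translate_fault(task_info.error)
            time.sleep(interval)  # library logs 'progress is N%' each pass

The 'Task: {...} progress is 0%' lines above are emitted from inside this loop. Note that DeleteDatastoreFile_Task completes normally ('duration_secs': 0.080887), so only the disk-copy step is affected.
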
[ 1818.040378] env[61962]: DEBUG nova.compute.claims [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1818.040593] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.040749] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.043427] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0c035008-265e-4299-924e-8027a2ced81c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.067732] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1818.121500] env[61962]: DEBUG oslo_vmware.rw_handles [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c8ff9423-1d4e-4688-b438-d2a776aed13f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1818.180974] env[61962]: DEBUG oslo_vmware.rw_handles [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1818.181187] env[61962]: DEBUG oslo_vmware.rw_handles [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c8ff9423-1d4e-4688-b438-d2a776aed13f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1818.253958] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91dbad32-9618-4e8b-a1f6-b445b84f47f0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.262878] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f210c5ad-afdc-4e55-ab4f-a086b80d782b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.293247] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39eef78b-8673-41d6-904b-5d5c7a3e52f1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.302037] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a9d39f-e872-468f-a9e5-e376b51b9104 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.316308] env[61962]: DEBUG nova.compute.provider_tree [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1818.324669] env[61962]: DEBUG nova.scheduler.client.report [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1818.341047] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.300s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.341364] env[61962]: ERROR nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1818.341364] env[61962]: Faults: ['InvalidArgument'] [ 1818.341364] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Traceback (most recent call last): [ 1818.341364] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1818.341364] env[61962]: 
ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] self.driver.spawn(context, instance, image_meta, [ 1818.341364] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1818.341364] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1818.341364] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1818.341364] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] self._fetch_image_if_missing(context, vi) [ 1818.341364] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1818.341364] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] image_cache(vi, tmp_image_ds_loc) [ 1818.341364] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] vm_util.copy_virtual_disk( [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] session._wait_for_task(vmdk_copy_task) [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] return self.wait_for_task(task_ref) [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] return evt.wait() [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] result = hub.switch() [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] return self.greenlet.switch() [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1818.341726] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] self.f(*self.args, **self.kw) [ 1818.342148] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1818.342148] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] raise exceptions.translate_fault(task_info.error) [ 1818.342148] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1818.342148] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Faults: ['InvalidArgument'] [ 1818.342148] env[61962]: ERROR nova.compute.manager [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] [ 1818.342602] env[61962]: DEBUG nova.compute.utils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1818.343607] env[61962]: DEBUG nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Build of instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 was re-scheduled: A specified parameter was not correct: fileType [ 1818.343607] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1818.343997] env[61962]: DEBUG nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1818.344187] env[61962]: DEBUG nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1818.344360] env[61962]: DEBUG nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1818.344520] env[61962]: DEBUG nova.network.neutron [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1818.721031] env[61962]: DEBUG nova.network.neutron [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.732914] env[61962]: INFO nova.compute.manager [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Took 0.39 seconds to deallocate network for instance. [ 1818.826231] env[61962]: INFO nova.scheduler.client.report [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Deleted allocations for instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 [ 1818.849139] env[61962]: DEBUG oslo_concurrency.lockutils [None req-9dfce35c-cfdc-4c34-8fd9-1d51d314d579 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 624.780s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.849396] env[61962]: DEBUG oslo_concurrency.lockutils [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 428.625s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.849643] env[61962]: DEBUG oslo_concurrency.lockutils [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Acquiring lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.849856] env[61962]: DEBUG oslo_concurrency.lockutils [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.850059] env[61962]: DEBUG oslo_concurrency.lockutils [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.852292] env[61962]: INFO nova.compute.manager [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Terminating instance [ 1818.854238] env[61962]: DEBUG nova.compute.manager [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1818.854445] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1818.855043] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88e4455f-a25d-456c-aedf-c16efc8cd7ff {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.865660] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6ac495-9b58-4a8d-91a4-ce58c5c59887 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.897193] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f0c884b-afb3-4d06-9dd2-f07099c364e0 could not be found. [ 1818.897435] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1818.897631] env[61962]: INFO nova.compute.manager [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1818.897903] env[61962]: DEBUG oslo.service.loopingcall [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1818.898180] env[61962]: DEBUG nova.compute.manager [-] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1818.898276] env[61962]: DEBUG nova.network.neutron [-] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1818.926323] env[61962]: DEBUG nova.network.neutron [-] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.934847] env[61962]: INFO nova.compute.manager [-] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] Took 0.04 seconds to deallocate network for instance. [ 1819.058213] env[61962]: DEBUG oslo_concurrency.lockutils [None req-bc1071d0-f834-436e-a83a-5da09b6b7428 tempest-AttachInterfacesTestJSON-87713823 tempest-AttachInterfacesTestJSON-87713823-project-member] Lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.209s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.059092] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 81.541s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.059300] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 6f0c884b-afb3-4d06-9dd2-f07099c364e0] During sync_power_state the instance has a pending task (deleting). Skip. 
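
The lock lines above all follow oslo.concurrency's accounting pattern: an 'Acquiring lock "X" by "Y"' line when a waiter arrives, an 'acquired ... waited N s' line once it gets the lock, and a '"released" ... held N s' line on exit, which is how the 624.780s build hold and the 428.625s terminate wait on lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0" were measured. A hedged sketch of the pattern (the lock name and body here are illustrative, not Nova's actual code):

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Runs with the named lock held. lockutils debug-logs who asked for
        # the lock, how long they waited, and how long it was held on
        # release -- producing the waited/held figures seen in this log.
        pass

The placement report at [1818.324669] also shows how schedulable capacity follows from the inventory data: placement derives it as (total - reserved) * allocation_ratio, so this provider advertises (48 - 0) * 4.0 = 192 VCPUs and (196590 - 512) * 1.0 = 196078 MB of RAM.
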
[ 1819.059472] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "6f0c884b-afb3-4d06-9dd2-f07099c364e0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.159275] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1829.172036] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.172288] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.172468] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.172629] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1829.173784] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ec2410-edff-4986-8bc2-0987a4899cad {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.183284] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a173b68-2643-4b6e-aa81-62cfd8c978bf {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.198494] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad93f9c-e0be-40c0-84ea-2d94fed59d91 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.205700] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecced819-4d0f-478f-bc4e-e4868cfa4595 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.234613] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180559MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1829.234802] env[61962]: DEBUG 
oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.235010] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.320293] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 3d2749f8-18d2-4631-adc2-093988125eb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1829.320293] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1035139f-2193-4d1c-a220-369ef6096ae0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1829.320293] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53048279-c3ea-484f-9a3e-049b12892f7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1829.320503] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1829.320503] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1829.320647] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1829.320753] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 02c198ea-61de-4e70-866c-39781d353d04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1829.320863] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85003695-cb3e-4bce-9a3a-a1d799b36369 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1829.321094] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1829.321255] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=100GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] stats={'failed_builds': '79', 'num_instances': '8', 'num_vm_building': '8', 'num_task_deleting': '6', 'num_os_type_None': '8', 'num_proj_34477c98bf9c4a87971a62cb96b072c7': '1', 'io_workload': '8', 'num_proj_e58b68272c404983993a09ad12e8c03b': '2', 'num_proj_88a7b37d54f047149b05fc80e0b51601': '1', 'num_proj_40bdd99e47aa40a28edd38c6b4a09a5f': '1', 'num_task_spawning': '2', 'num_proj_56bb35c701074f02820d93ad5911d089': '2', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1829.427722] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c7e29c-d86b-4dee-ac2c-0833acdec5b8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.435079] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f090e67-6d99-4b7f-b1d7-c90b2884eab1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.464752] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e8b964-73f7-4b9b-afca-c71b27532821 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.472633] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476511e6-2a97-4062-bf75-755a6d61414b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.486483] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1829.495418] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 
'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1829.512364] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1829.512876] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.278s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.512254] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.512643] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1837.153910] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1841.159488] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1841.159892] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1842.159684] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1843.154118] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1843.174247] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1843.174615] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1843.174615] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1843.192700] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1843.192864] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1843.193010] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1843.193149] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1843.193279] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1843.193410] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1843.193529] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1843.193649] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1843.193769] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1846.159506] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1847.158429] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1847.234250] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.835539] env[61962]: WARNING oslo_vmware.rw_handles [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1866.835539] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1866.835539] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1866.835539] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1866.835539] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1866.835539] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1866.835539] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1866.835539] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1866.835539] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1866.835539] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1866.835539] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1866.835539] env[61962]: ERROR oslo_vmware.rw_handles [ 1866.836599] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/c8ff9423-1d4e-4688-b438-d2a776aed13f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1866.838160] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 
1866.838407] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Copying Virtual Disk [datastore2] vmware_temp/c8ff9423-1d4e-4688-b438-d2a776aed13f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/c8ff9423-1d4e-4688-b438-d2a776aed13f/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1866.838706] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-468089a9-4f58-4b43-92e4-fd8263654722 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.846676] env[61962]: DEBUG oslo_vmware.api [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Waiting for the task: (returnval){ [ 1866.846676] env[61962]: value = "task-4892083" [ 1866.846676] env[61962]: _type = "Task" [ 1866.846676] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.855088] env[61962]: DEBUG oslo_vmware.api [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Task: {'id': task-4892083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.357568] env[61962]: DEBUG oslo_vmware.exceptions [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1867.357868] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.358440] env[61962]: ERROR nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1867.358440] env[61962]: Faults: ['InvalidArgument'] [ 1867.358440] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Traceback (most recent call last): [ 1867.358440] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1867.358440] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] yield resources [ 1867.358440] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1867.358440] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] self.driver.spawn(context, instance, image_meta, [ 1867.358440] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1867.358440] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1867.358440] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1867.358440] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] self._fetch_image_if_missing(context, vi) [ 1867.358440] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] image_cache(vi, tmp_image_ds_loc) [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] vm_util.copy_virtual_disk( [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] session._wait_for_task(vmdk_copy_task) [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] return self.wait_for_task(task_ref) [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] return evt.wait() [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] result = hub.switch() [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1867.358836] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] return self.greenlet.switch() [ 1867.359233] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1867.359233] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] self.f(*self.args, **self.kw) [ 1867.359233] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1867.359233] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] raise exceptions.translate_fault(task_info.error) [ 1867.359233] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1867.359233] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Faults: ['InvalidArgument'] [ 1867.359233] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] [ 1867.359233] env[61962]: INFO nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Terminating instance [ 1867.360302] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.360501] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1867.360733] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31503eac-936a-4d76-bb6d-b22fb9297989 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.362880] env[61962]: DEBUG nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1867.363089] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1867.363793] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813e84c0-6ea1-4089-b7aa-1c648b241cc9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.371474] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1867.371707] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e30001d9-75e3-47fc-90d1-75d739246508 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.373919] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1867.374072] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1867.375063] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7397c57-4fc9-4537-8f18-9a04a205c1d0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.380086] env[61962]: DEBUG oslo_vmware.api [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 1867.380086] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]523060d1-90c0-c060-5abf-9c068bd64819" [ 1867.380086] env[61962]: _type = "Task" [ 1867.380086] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.388154] env[61962]: DEBUG oslo_vmware.api [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]523060d1-90c0-c060-5abf-9c068bd64819, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.447082] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1867.447304] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1867.447485] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Deleting the datastore file [datastore2] 3d2749f8-18d2-4631-adc2-093988125eb4 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1867.447760] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6328e94d-f7c8-4e9c-93e7-8cca82a433d2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.454023] env[61962]: DEBUG oslo_vmware.api [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Waiting for the task: (returnval){ [ 1867.454023] env[61962]: value = "task-4892085" [ 1867.454023] env[61962]: _type = "Task" [ 1867.454023] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.462010] env[61962]: DEBUG oslo_vmware.api [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Task: {'id': task-4892085, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.891792] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1867.892314] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating directory with path [datastore2] vmware_temp/f27edca6-1304-4f0d-9e18-e2527c3d2f88/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1867.892314] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9330f695-c145-443c-a992-6b479c61b4ce {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.909391] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Created directory with path [datastore2] vmware_temp/f27edca6-1304-4f0d-9e18-e2527c3d2f88/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1867.909631] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Fetch image to [datastore2] vmware_temp/f27edca6-1304-4f0d-9e18-e2527c3d2f88/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1867.909720] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/f27edca6-1304-4f0d-9e18-e2527c3d2f88/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1867.910494] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cbb755-76ab-4ee6-b574-be5a6d4cb3e3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.917403] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e4388b-5098-4e59-9a88-fec48b27ad25 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.926473] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2648f3e5-d277-44af-89af-f924d03ce927 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.962087] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e6f1b0-82a8-430c-93dd-577df39ab341 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.971218] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c99481af-8c93-45cf-b08b-9a3b8c0771b4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.973010] env[61962]: DEBUG oslo_vmware.api [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Task: {'id': task-4892085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115763} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.973300] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1867.973487] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1867.973654] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1867.973830] env[61962]: INFO nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Took 0.61 seconds to destroy the instance on the hypervisor. 
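
The sequence above is the VMware driver's task protocol end to end: copy_virtual_disk invokes VirtualDiskManager.CopyVirtualDisk_Task, wait_for_task polls the task object (the "progress is 0%" records), and when the server reports a task error the fault is translated into a VimFaultException carrying the fault-name list, here ['InvalidArgument'] for the bad fileType, which unwinds spawn and drives the UnregisterVM / DeleteDatastoreFile_Task cleanup seen above. Below is a minimal, stdlib-only sketch of that poll-and-translate loop; TaskInfo, VimFaultError, and poll_task_until_done are hypothetical stand-ins for illustration, not the real oslo.vmware API.

import time
from dataclasses import dataclass, field

@dataclass
class TaskInfo:
    state: str = "running"            # "running" | "success" | "error"
    progress: int = 0
    fault_names: list = field(default_factory=list)
    message: str = ""

class VimFaultError(Exception):
    """Carries the server-reported fault names, like the
    "Faults: ['InvalidArgument']" suffix in the records above."""
    def __init__(self, message, fault_names):
        super().__init__(f"{message} Faults: {fault_names}")
        self.fault_names = fault_names

def poll_task_until_done(fetch_task_info, interval=0.5, timeout=60.0):
    """Poll a server-side task until it finishes, translating errors.

    fetch_task_info is a callable returning the latest TaskInfo; in the
    real driver each poll is a PropertyCollector round trip.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # The analogue of raising a translated fault from the poller.
            raise VimFaultError(info.message, info.fault_names)
        print(f"progress is {info.progress}%")   # cf. the _poll_task lines
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")

if __name__ == "__main__":
    # Simulate the failure above: the copy task errors out server-side.
    polls = iter([
        TaskInfo(state="running"),
        TaskInfo(state="error", fault_names=["InvalidArgument"],
                 message="A specified parameter was not correct: fileType."),
    ])
    try:
        poll_task_until_done(lambda: next(polls), interval=0.01)
    except VimFaultError as exc:
        print(f"spawn unwinds and cleanup starts: {exc}")

Only a terminal error state raises; intermediate polls just report progress, which is why the traceback above surfaces from session._wait_for_task rather than from the initial CopyVirtualDisk_Task invocation.
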
[ 1867.975916] env[61962]: DEBUG nova.compute.claims [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1867.976125] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.976330] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.994886] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1868.062252] env[61962]: DEBUG oslo_vmware.rw_handles [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f27edca6-1304-4f0d-9e18-e2527c3d2f88/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1868.124027] env[61962]: DEBUG oslo_vmware.rw_handles [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1868.124278] env[61962]: DEBUG oslo_vmware.rw_handles [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f27edca6-1304-4f0d-9e18-e2527c3d2f88/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1868.191244] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934cd910-8d70-48cb-9651-7705bc1565e2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.199853] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509c0501-9d39-4a83-9951-7ef5573b8ccf {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.229817] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01ccb56-7ebe-4d30-ae80-57eb173a13c0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.237451] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deabee88-91c1-4920-b61b-5202d851f30a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.251854] env[61962]: DEBUG nova.compute.provider_tree [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1868.261648] env[61962]: DEBUG nova.scheduler.client.report [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1868.277918] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.301s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.278435] env[61962]: ERROR nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1868.278435] env[61962]: Faults: ['InvalidArgument'] [ 1868.278435] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Traceback (most recent call last): [ 1868.278435] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1868.278435] env[61962]: 
ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] self.driver.spawn(context, instance, image_meta, [ 1868.278435] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1868.278435] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1868.278435] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1868.278435] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] self._fetch_image_if_missing(context, vi) [ 1868.278435] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1868.278435] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] image_cache(vi, tmp_image_ds_loc) [ 1868.278435] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] vm_util.copy_virtual_disk( [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] session._wait_for_task(vmdk_copy_task) [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] return self.wait_for_task(task_ref) [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] return evt.wait() [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] result = hub.switch() [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] return self.greenlet.switch() [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1868.278893] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] self.f(*self.args, **self.kw) [ 1868.279255] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1868.279255] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] raise exceptions.translate_fault(task_info.error) [ 1868.279255] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1868.279255] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Faults: ['InvalidArgument'] [ 1868.279255] env[61962]: ERROR nova.compute.manager [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] [ 1868.279255] env[61962]: DEBUG nova.compute.utils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1868.280568] env[61962]: DEBUG nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Build of instance 3d2749f8-18d2-4631-adc2-093988125eb4 was re-scheduled: A specified parameter was not correct: fileType [ 1868.280568] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1868.280945] env[61962]: DEBUG nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1868.281134] env[61962]: DEBUG nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1868.281321] env[61962]: DEBUG nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1868.281485] env[61962]: DEBUG nova.network.neutron [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1868.745572] env[61962]: DEBUG nova.network.neutron [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.761743] env[61962]: INFO nova.compute.manager [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Took 0.48 seconds to deallocate network for instance. [ 1868.899913] env[61962]: INFO nova.scheduler.client.report [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Deleted allocations for instance 3d2749f8-18d2-4631-adc2-093988125eb4 [ 1868.921308] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ae999722-5278-4a0e-8508-ee195677c4b3 tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Lock "3d2749f8-18d2-4631-adc2-093988125eb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 630.404s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.921612] env[61962]: DEBUG oslo_concurrency.lockutils [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Lock "3d2749f8-18d2-4631-adc2-093988125eb4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 434.673s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.921871] env[61962]: DEBUG oslo_concurrency.lockutils [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Acquiring lock "3d2749f8-18d2-4631-adc2-093988125eb4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.922152] env[61962]: DEBUG oslo_concurrency.lockutils [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Lock "3d2749f8-18d2-4631-adc2-093988125eb4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.922368] env[61962]: DEBUG oslo_concurrency.lockutils [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Lock "3d2749f8-18d2-4631-adc2-093988125eb4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.924407] env[61962]: INFO nova.compute.manager [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Terminating instance [ 1868.926185] env[61962]: DEBUG nova.compute.manager [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1868.926401] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1868.926880] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a581013-663f-4e2f-aab9-7dfb522a36df {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.936398] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a10b6c4-bf01-4f98-9fb3-726df7f782fe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.965798] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3d2749f8-18d2-4631-adc2-093988125eb4 could not be found. [ 1868.966017] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1868.966206] env[61962]: INFO nova.compute.manager [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1868.966484] env[61962]: DEBUG oslo.service.loopingcall [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1868.966721] env[61962]: DEBUG nova.compute.manager [-] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1868.966822] env[61962]: DEBUG nova.network.neutron [-] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1868.992745] env[61962]: DEBUG nova.network.neutron [-] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.001730] env[61962]: INFO nova.compute.manager [-] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] Took 0.03 seconds to deallocate network for instance. [ 1869.117233] env[61962]: DEBUG oslo_concurrency.lockutils [None req-06cff809-8223-438f-9bd3-fbe8352c389a tempest-ServerMetadataTestJSON-1606436818 tempest-ServerMetadataTestJSON-1606436818-project-member] Lock "3d2749f8-18d2-4631-adc2-093988125eb4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.195s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.118047] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "3d2749f8-18d2-4631-adc2-093988125eb4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 131.600s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.118249] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 3d2749f8-18d2-4631-adc2-093988125eb4] During sync_power_state the instance has a pending task (deleting). Skip.
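
Each critical section in these records is bracketed by the same three lockutils lines: "Acquiring lock", "acquired ... waited Ns", and "released ... held Ns". The timings make contention visible, e.g. the instance lock released above after being held 630.404s across the whole failed build, while the competing terminate request had waited 434.673s for it and the power-state sync another 131.600s. Below is a stdlib-only sketch of that waited/held bookkeeping; instrumented_lock and the module-level lock registry are hypothetical stand-ins for illustration, not oslo.concurrency's implementation.

import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one named lock per key

@contextmanager
def instrumented_lock(name, owner):
    """Serialize work on `name`, logging wait and hold durations."""
    lock = _locks[name]
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

if __name__ == "__main__":
    # Serialize on the instance UUID the way do_terminate_instance does.
    with instrumented_lock("3d2749f8-18d2-4631-adc2-093988125eb4",
                           "do_terminate_instance"):
        time.sleep(0.05)   # stand-in for the destroy/deallocate work

Keying the lock on the instance UUID (or UUID-events) rather than using one global lock is what lets the long build, the terminate request, and the power-state sync above interleave safely per instance.
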
[ 1869.118837] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "3d2749f8-18d2-4631-adc2-093988125eb4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.138159] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "85003695-cb3e-4bce-9a3a-a1d799b36369" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.160198] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1891.175055] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.175055] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.175055] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.175055] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1891.175533] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ff12c6-fd23-40a9-a4e2-8a7aa95714c9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.186053] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90271e7-d465-4283-9259-7fb6f0c1790a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.200389] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbdfb51-1e75-473a-aeb5-87c31cf8fe88 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.207146] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538a4d1a-0b70-469d-b368-740052e04415 {{(pid=61962) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.236597] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180570MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1891.236769] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.236951] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.304985] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1035139f-2193-4d1c-a220-369ef6096ae0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1891.305168] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53048279-c3ea-484f-9a3e-049b12892f7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1891.305283] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1891.305453] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1891.305532] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1891.305668] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 02c198ea-61de-4e70-866c-39781d353d04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1891.305812] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85003695-cb3e-4bce-9a3a-a1d799b36369 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1891.305998] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1891.306159] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '80', 'num_instances': '7', 'num_vm_building': '7', 'num_task_deleting': '7', 'num_os_type_None': '7', 'num_proj_e58b68272c404983993a09ad12e8c03b': '2', 'io_workload': '7', 'num_proj_88a7b37d54f047149b05fc80e0b51601': '1', 'num_proj_40bdd99e47aa40a28edd38c6b4a09a5f': '1', 'num_proj_56bb35c701074f02820d93ad5911d089': '2', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1891.401270] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04977a45-c7b6-4a1d-9ddd-12759aefa54c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.409144] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2443a8b2-320b-4d55-88be-67d7bda070c4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.439837] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8620814a-2d9d-47eb-909f-18e0d07f2c91 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.447627] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb161c0-a040-4577-ae81-6bb7bedb5e4c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.460540] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1891.470923] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1891.485623] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1891.485807] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.249s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.484632] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.484922] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1899.153936] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1899.230443] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "1a74c820-67e9-424c-b4e6-333db3449251" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.230705] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "1a74c820-67e9-424c-b4e6-333db3449251" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.247088] env[61962]: DEBUG nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Starting instance...
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1899.309938] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.310201] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.311703] env[61962]: INFO nova.compute.claims [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1899.483797] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73bc57be-5353-4094-b1c2-594cb28756b9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.493653] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747f87b0-431c-4d03-b60e-d1f981be0087 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.526451] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bc1839-b5f9-43b1-9993-5136637c9940 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.536401] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0c196b-e7ef-4e79-8fdd-6191156bec61 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.551037] env[61962]: DEBUG nova.compute.provider_tree [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1899.561495] env[61962]: DEBUG nova.scheduler.client.report [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1899.578961] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 
tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.269s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.579485] env[61962]: DEBUG nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1899.615041] env[61962]: DEBUG nova.compute.utils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1899.616608] env[61962]: DEBUG nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1899.616839] env[61962]: DEBUG nova.network.neutron [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1899.630516] env[61962]: DEBUG nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1899.687676] env[61962]: DEBUG nova.policy [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0be80fa75c4c49d899a3dac926c1ed37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc34536d51ef4a22aef0b274ba5aa28e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 1899.697466] env[61962]: DEBUG nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1899.725574] env[61962]: DEBUG nova.virt.hardware [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1899.725817] env[61962]: DEBUG nova.virt.hardware [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1899.725972] env[61962]: DEBUG nova.virt.hardware [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1899.726172] env[61962]: DEBUG nova.virt.hardware [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1899.726319] env[61962]: DEBUG nova.virt.hardware [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1899.726467] env[61962]: DEBUG nova.virt.hardware [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1899.726702] env[61962]: DEBUG nova.virt.hardware [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1899.726866] env[61962]: DEBUG nova.virt.hardware [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1899.727042] env[61962]: DEBUG nova.virt.hardware [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 
tempest-ServersTestJSON-536512511-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1899.727210] env[61962]: DEBUG nova.virt.hardware [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1899.727381] env[61962]: DEBUG nova.virt.hardware [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1899.728296] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad470b4-0c93-4fca-8cd3-f9071d899e7e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.737867] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1da54d-09c7-44b2-8f57-79ab788aff8b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.108689] env[61962]: DEBUG nova.network.neutron [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Successfully created port: 9e5009c4-2e33-4b07-8b2e-776e025ff321 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1900.757857] env[61962]: DEBUG nova.compute.manager [req-002536ab-3359-4062-9ed9-ef9fc45fe487 req-66ba490d-704f-4855-8171-32e065b3b7ae service nova] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Received event network-vif-plugged-9e5009c4-2e33-4b07-8b2e-776e025ff321 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1900.758169] env[61962]: DEBUG oslo_concurrency.lockutils [req-002536ab-3359-4062-9ed9-ef9fc45fe487 req-66ba490d-704f-4855-8171-32e065b3b7ae service nova] Acquiring lock "1a74c820-67e9-424c-b4e6-333db3449251-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.758351] env[61962]: DEBUG oslo_concurrency.lockutils [req-002536ab-3359-4062-9ed9-ef9fc45fe487 req-66ba490d-704f-4855-8171-32e065b3b7ae service nova] Lock "1a74c820-67e9-424c-b4e6-333db3449251-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.758458] env[61962]: DEBUG oslo_concurrency.lockutils [req-002536ab-3359-4062-9ed9-ef9fc45fe487 req-66ba490d-704f-4855-8171-32e065b3b7ae service nova] Lock "1a74c820-67e9-424c-b4e6-333db3449251-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.758637] env[61962]: DEBUG nova.compute.manager [req-002536ab-3359-4062-9ed9-ef9fc45fe487 req-66ba490d-704f-4855-8171-32e065b3b7ae service nova] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] No 
waiting events found dispatching network-vif-plugged-9e5009c4-2e33-4b07-8b2e-776e025ff321 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1900.758771] env[61962]: WARNING nova.compute.manager [req-002536ab-3359-4062-9ed9-ef9fc45fe487 req-66ba490d-704f-4855-8171-32e065b3b7ae service nova] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Received unexpected event network-vif-plugged-9e5009c4-2e33-4b07-8b2e-776e025ff321 for instance with vm_state building and task_state spawning. [ 1900.830804] env[61962]: DEBUG nova.network.neutron [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Successfully updated port: 9e5009c4-2e33-4b07-8b2e-776e025ff321 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1900.845945] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "refresh_cache-1a74c820-67e9-424c-b4e6-333db3449251" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1900.846099] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired lock "refresh_cache-1a74c820-67e9-424c-b4e6-333db3449251" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1900.846252] env[61962]: DEBUG nova.network.neutron [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1900.888200] env[61962]: DEBUG nova.network.neutron [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Instance cache missing network info. 
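The sequence above, where pop_instance_event finds "No waiting events found" and the manager then warns about an unexpected network-vif-plugged event, is the classic register-then-wait race: Neutron delivered the event before the build path registered a waiter. A simplified sketch of the pattern, assuming a plain dict of threading.Event objects rather than Nova's real InstanceEvents class:

import threading

_waiters = {}      # (instance_uuid, event_name) -> threading.Event
_lock = threading.Lock()

def prepare_for_event(instance_uuid, event_name):
    # The build path registers interest *before* triggering the work that
    # will eventually fire the event.
    ev = threading.Event()
    with _lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev

def pop_instance_event(instance_uuid, event_name):
    # The external-event handler pops the waiter; if none was registered
    # yet, the event is logged as unexpected and dropped.
    with _lock:
        ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        print(f'Received unexpected event {event_name}')
    else:
        ev.set()

Here the warning is harmless: the instance was still spawning, so nothing was blocked on the plug notification.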
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1901.066696] env[61962]: DEBUG nova.network.neutron [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Updating instance_info_cache with network_info: [{"id": "9e5009c4-2e33-4b07-8b2e-776e025ff321", "address": "fa:16:3e:1c:d2:bd", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5009c4-2e", "ovs_interfaceid": "9e5009c4-2e33-4b07-8b2e-776e025ff321", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.079975] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Releasing lock "refresh_cache-1a74c820-67e9-424c-b4e6-333db3449251" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1901.080272] env[61962]: DEBUG nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Instance network_info: |[{"id": "9e5009c4-2e33-4b07-8b2e-776e025ff321", "address": "fa:16:3e:1c:d2:bd", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5009c4-2e", "ovs_interfaceid": "9e5009c4-2e33-4b07-8b2e-776e025ff321", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1901.080677] env[61962]: 
DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:d2:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4d3f69a-b086-4c3b-b976-5a848b63dfc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e5009c4-2e33-4b07-8b2e-776e025ff321', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1901.088945] env[61962]: DEBUG oslo.service.loopingcall [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1901.089581] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1901.089907] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad43167f-0f80-4e8c-bff2-2a4154c54fd7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.113388] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1901.113388] env[61962]: value = "task-4892086" [ 1901.113388] env[61962]: _type = "Task" [ 1901.113388] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.122207] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892086, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.625022] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892086, 'name': CreateVM_Task, 'duration_secs': 0.405427} completed successfully. 
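CreateVM_Task above is driven by the usual oslo.vmware poll loop: read the task state, report progress, and sleep until the task reaches success or error. A sketch of that loop, where get_task_info is a hypothetical callable standing in for the vSphere property read oslo.vmware performs on each tick:

import time

def wait_for_task(get_task_info, task_ref, interval=0.5):
    while True:
        info = get_task_info(task_ref)   # e.g. {'state': 'running', 'progress': 0}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print(f"Task {task_ref} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)             # queued or running: poll again

The CreateVM_Task here went from 0% to done in 0.405s, i.e. within a single poll interval.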
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.625022] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1901.631614] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.631786] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.632113] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1901.632359] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ff87745-d71c-4b4d-9da5-55733ae11c79 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.637125] env[61962]: DEBUG oslo_vmware.api [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 1901.637125] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52bf5f38-bfb3-61e9-e16f-a7004e3d29f0" [ 1901.637125] env[61962]: _type = "Task" [ 1901.637125] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.645133] env[61962]: DEBUG oslo_vmware.api [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52bf5f38-bfb3-61e9-e16f-a7004e3d29f0, 'name': SearchDatastore_Task} progress is 0%. 
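Note the lock name in the records above: it embeds the datastore path of the cached image ("[datastore2] devstack-image-cache_base/<image-id>"), so concurrent builds from the same image serialize while builds from different images proceed in parallel. A minimal sketch of that naming scheme, with illustrative path components:

from oslo_concurrency import lockutils

def fetch_image_if_missing(datastore, cache_dir, image_id):
    lock_name = f'[{datastore}] {cache_dir}/{image_id}'
    with lockutils.lock(lock_name):
        # Only one worker per image path gets here at a time; it checks the
        # cache and downloads or copies the image only if it is missing.
        print(f'checking image cache under lock "{lock_name}"')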
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.148185] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.148515] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1902.148668] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.158221] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1902.158408] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1902.158564] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1902.784949] env[61962]: DEBUG nova.compute.manager [req-aedfc59e-57dd-4110-a775-9dc388519558 req-f10fae0e-3a86-4aa1-b294-65b1739a34ed service nova] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Received event network-changed-9e5009c4-2e33-4b07-8b2e-776e025ff321 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1902.785174] env[61962]: DEBUG nova.compute.manager [req-aedfc59e-57dd-4110-a775-9dc388519558 req-f10fae0e-3a86-4aa1-b294-65b1739a34ed service nova] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Refreshing instance network info cache due to event network-changed-9e5009c4-2e33-4b07-8b2e-776e025ff321. 
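The _poll_rescued_instances / _poll_unconfirmed_resizes / _poll_volume_usage lines above come from the compute manager's periodic-task loop. Below is a deliberately simplified stand-in for that machinery; oslo.service's real periodic_task adds per-task spacing, jitter, and more careful error handling.

import threading
import time

def run_periodic_tasks(tasks, interval=60.0):
    def loop():
        while True:
            for task in tasks:
                print(f'Running periodic task {task.__name__}')
                try:
                    task()
                except Exception as exc:  # one failing task must not kill the loop
                    print(f'periodic task {task.__name__} failed: {exc}')
            time.sleep(interval)
    threading.Thread(target=loop, daemon=True).start()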
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1902.785371] env[61962]: DEBUG oslo_concurrency.lockutils [req-aedfc59e-57dd-4110-a775-9dc388519558 req-f10fae0e-3a86-4aa1-b294-65b1739a34ed service nova] Acquiring lock "refresh_cache-1a74c820-67e9-424c-b4e6-333db3449251" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.785516] env[61962]: DEBUG oslo_concurrency.lockutils [req-aedfc59e-57dd-4110-a775-9dc388519558 req-f10fae0e-3a86-4aa1-b294-65b1739a34ed service nova] Acquired lock "refresh_cache-1a74c820-67e9-424c-b4e6-333db3449251" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.785676] env[61962]: DEBUG nova.network.neutron [req-aedfc59e-57dd-4110-a775-9dc388519558 req-f10fae0e-3a86-4aa1-b294-65b1739a34ed service nova] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Refreshing network info cache for port 9e5009c4-2e33-4b07-8b2e-776e025ff321 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1903.058601] env[61962]: DEBUG nova.network.neutron [req-aedfc59e-57dd-4110-a775-9dc388519558 req-f10fae0e-3a86-4aa1-b294-65b1739a34ed service nova] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Updated VIF entry in instance network info cache for port 9e5009c4-2e33-4b07-8b2e-776e025ff321. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1903.058968] env[61962]: DEBUG nova.network.neutron [req-aedfc59e-57dd-4110-a775-9dc388519558 req-f10fae0e-3a86-4aa1-b294-65b1739a34ed service nova] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Updating instance_info_cache with network_info: [{"id": "9e5009c4-2e33-4b07-8b2e-776e025ff321", "address": "fa:16:3e:1c:d2:bd", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5009c4-2e", "ovs_interfaceid": "9e5009c4-2e33-4b07-8b2e-776e025ff321", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.068039] env[61962]: DEBUG oslo_concurrency.lockutils [req-aedfc59e-57dd-4110-a775-9dc388519558 req-f10fae0e-3a86-4aa1-b294-65b1739a34ed service nova] Releasing lock "refresh_cache-1a74c820-67e9-424c-b4e6-333db3449251" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.158619] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1905.159045] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1905.159045] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1905.180388] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1905.180578] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1905.180678] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1905.180808] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1905.180945] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1905.181077] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1905.181199] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1905.181316] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1905.181432] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
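The heal task above rebuilds its candidate list and then skips every instance that is still Building, since the in-flight build path is about to write that instance's network info cache itself; with all eight instances building, it ends with nothing to heal. The filtering step, sketched with illustrative field names:

BUILDING = 'building'

def instances_to_heal(instances):
    to_heal = []
    for inst in instances:
        if inst['vm_state'] == BUILDING:
            print(f"[instance: {inst['uuid']}] Skipping network cache update "
                  "for instance because it is Building.")
            continue
        to_heal.append(inst)
    if not to_heal:
        print("Didn't find any instances for network info cache update.")
    return to_heal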
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1907.158624] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1909.158759] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.800448] env[61962]: WARNING oslo_vmware.rw_handles [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1917.800448] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1917.800448] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1917.800448] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1917.800448] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1917.800448] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1917.800448] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1917.800448] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1917.800448] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1917.800448] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1917.800448] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1917.800448] env[61962]: ERROR oslo_vmware.rw_handles [ 1917.801293] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/f27edca6-1304-4f0d-9e18-e2527c3d2f88/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1917.802857] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1917.803111] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Copying Virtual Disk [datastore2] vmware_temp/f27edca6-1304-4f0d-9e18-e2527c3d2f88/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/f27edca6-1304-4f0d-9e18-e2527c3d2f88/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1917.803401] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87c416c0-f10f-49cb-a566-70241bdb6053 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.813184] env[61962]: DEBUG oslo_vmware.api [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 1917.813184] env[61962]: value = "task-4892087" [ 1917.813184] env[61962]: _type = "Task" [ 1917.813184] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.821447] env[61962]: DEBUG oslo_vmware.api [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4892087, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.323996] env[61962]: DEBUG oslo_vmware.exceptions [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1918.324262] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.324873] env[61962]: ERROR nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1918.324873] env[61962]: Faults: ['InvalidArgument'] [ 1918.324873] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Traceback (most recent call last): [ 1918.324873] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1918.324873] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] yield resources [ 1918.324873] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1918.324873] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] self.driver.spawn(context, instance, image_meta, [ 1918.324873] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1918.324873] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1918.324873] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1918.324873] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] self._fetch_image_if_missing(context, vi) [ 1918.324873] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1918.324873] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] image_cache(vi, tmp_image_ds_loc) [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] vm_util.copy_virtual_disk( [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] session._wait_for_task(vmdk_copy_task) [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] return self.wait_for_task(task_ref) [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] return evt.wait() [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] result = hub.switch() [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] return self.greenlet.switch() [ 1918.325578] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1918.325997] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] self.f(*self.args, **self.kw) [ 1918.325997] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1918.325997] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] raise exceptions.translate_fault(task_info.error) [ 1918.325997] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1918.325997] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Faults: ['InvalidArgument'] [ 1918.325997] 
env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] [ 1918.325997] env[61962]: INFO nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Terminating instance [ 1918.326782] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.326995] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1918.327261] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2aee8ca-8236-4621-b1da-4fc16430c1dc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.329536] env[61962]: DEBUG nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1918.329728] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1918.330483] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81e1696-592a-4501-a718-7d10a64c15c3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.339194] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1918.339194] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9640f6b8-d2c2-45f6-aa36-b9cb36eb11c4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.340813] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1918.340972] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 
tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1918.341936] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7fcc801-87bd-40bc-9c81-efb4587cb53b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.346901] env[61962]: DEBUG oslo_vmware.api [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Waiting for the task: (returnval){ [ 1918.346901] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5212cf27-5ca5-c695-513d-f81c0f9e351e" [ 1918.346901] env[61962]: _type = "Task" [ 1918.346901] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.354857] env[61962]: DEBUG oslo_vmware.api [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5212cf27-5ca5-c695-513d-f81c0f9e351e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.415882] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1918.416117] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1918.416301] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleting the datastore file [datastore2] 1035139f-2193-4d1c-a220-369ef6096ae0 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1918.416574] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6524f469-783a-4050-840e-027fd8e1debf {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.422904] env[61962]: DEBUG oslo_vmware.api [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 1918.422904] env[61962]: value = "task-4892089" [ 1918.422904] env[61962]: _type = "Task" [ 1918.422904] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.431086] env[61962]: DEBUG oslo_vmware.api [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4892089, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.857367] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1918.857766] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Creating directory with path [datastore2] vmware_temp/8a935267-43f5-469e-b99f-2c374b34ba6b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1918.857919] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6659098-a9d1-4731-9509-424c38a69f88 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.876014] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Created directory with path [datastore2] vmware_temp/8a935267-43f5-469e-b99f-2c374b34ba6b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1918.876251] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Fetch image to [datastore2] vmware_temp/8a935267-43f5-469e-b99f-2c374b34ba6b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1918.876402] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/8a935267-43f5-469e-b99f-2c374b34ba6b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1918.877222] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369917ef-b9f0-4dfa-899b-f84ae9ad28b6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.884677] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657d91c2-ba9e-4423-8653-a184288f1cd9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.894285] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07fb03d-07b1-4df3-8b5a-640d0e626b89 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.929123] env[61962]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46790809-8fcf-4c81-8e9d-40a10d57c75f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.937153] env[61962]: DEBUG oslo_vmware.api [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4892089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119627} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.938699] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1918.938904] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1918.939090] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1918.939268] env[61962]: INFO nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Took 0.61 seconds to destroy the instance on the hypervisor. 
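The failure path above follows a fixed order: CopyVirtualDisk_Task faults with InvalidArgument, spawn raises, and the manager destroys the half-built VM (unregister, delete datastore contents) before aborting the resource claim so the host's accounting is restored. A hedged sketch of that order, where driver and claim are hypothetical objects, not Nova's actual interfaces:

from oslo_vmware import exceptions as vexc

def build_instance(driver, claim, context, instance, image_meta):
    try:
        driver.spawn(context, instance, image_meta)
    except vexc.VimFaultException as fault:
        print(f'Instance failed to spawn: {fault}')
        driver.destroy(context, instance)  # unregister VM, delete datastore files
        claim.abort()                      # give back the claimed CPU/RAM/disk
        raise                              # the build is then re-scheduled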
[ 1918.941062] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1e10c68d-0e0f-4ccc-8613-3ae4a85a3553 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.943050] env[61962]: DEBUG nova.compute.claims [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1918.943230] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.943438] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.970778] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1919.036606] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8a935267-43f5-469e-b99f-2c374b34ba6b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1919.097360] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1919.097486] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8a935267-43f5-469e-b99f-2c374b34ba6b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
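The rw_handles records above stream the 21318656-byte image to the datastore's /folder URL through an HTTP write handle, then close it. oslo.vmware does this with http.client plus a vSphere service ticket; the sketch below uses plain requests only to show the streaming shape, and the URL handling is illustrative.

import requests

def upload_image(image_iter, url):
    # Passing an iterator streams the body chunk by chunk, mirroring
    # "Creating HTTP connection to write to file with size = N and URL = ...".
    resp = requests.put(url, data=image_iter, verify=False, timeout=300)
    resp.raise_for_status()  # closing the write handle ~= completing the PUT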
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1919.199770] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e8da01-bab9-47d7-b3f1-161bcef643ca {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.208974] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eac3fe5-111f-4dc8-976c-cb442b60b763 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.241395] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009d7249-cb0a-4f66-9c10-7dee45335795 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.249542] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f20bce-044d-4c8b-8cd8-396ce22e5454 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.263134] env[61962]: DEBUG nova.compute.provider_tree [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1919.273791] env[61962]: DEBUG nova.scheduler.client.report [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1919.290151] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.345s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.290151] env[61962]: ERROR nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1919.290151] env[61962]: Faults: ['InvalidArgument'] [ 1919.290151] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Traceback (most recent call last): [ 1919.290151] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1919.290151] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] 
self.driver.spawn(context, instance, image_meta, [ 1919.290151] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1919.290151] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1919.290151] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1919.290151] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] self._fetch_image_if_missing(context, vi) [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] image_cache(vi, tmp_image_ds_loc) [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] vm_util.copy_virtual_disk( [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] session._wait_for_task(vmdk_copy_task) [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] return self.wait_for_task(task_ref) [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] return evt.wait() [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] result = hub.switch() [ 1919.290508] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1919.291024] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] return self.greenlet.switch() [ 1919.291024] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1919.291024] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] self.f(*self.args, **self.kw) [ 1919.291024] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1919.291024] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0]     raise exceptions.translate_fault(task_info.error) [ 1919.291024] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1919.291024] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Faults: ['InvalidArgument'] [ 1919.291024] env[61962]: ERROR nova.compute.manager [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] [ 1919.291024] env[61962]: DEBUG nova.compute.utils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1919.295158] env[61962]: DEBUG nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Build of instance 1035139f-2193-4d1c-a220-369ef6096ae0 was re-scheduled: A specified parameter was not correct: fileType [ 1919.295158] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1919.295158] env[61962]: DEBUG nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1919.295158] env[61962]: DEBUG nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1919.295158] env[61962]: DEBUG nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1919.295483] env[61962]: DEBUG nova.network.neutron [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1919.633975] env[61962]: DEBUG nova.network.neutron [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.645997] env[61962]: INFO nova.compute.manager [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Took 0.35 seconds to deallocate network for instance.
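The traceback above bottoms out in oslo.vmware's task poller: CopyVirtualDisk_Task finishes in an error state, _poll_task translates the VIM fault into a Python exception, and the compute manager re-schedules the build rather than failing it permanently. A minimal, self-contained sketch of that poll-and-translate pattern (simplified stand-ins for the oslo.vmware types, not the library's actual code):

    import time

    class VimFaultException(Exception):
        """Simplified stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def translate_fault(error):
        # oslo.vmware maps known VIM faults to dedicated exception classes;
        # anything unmatched (as the later "Fault InvalidArgument not matched"
        # DEBUG line in this log shows) falls back to the generic
        # VimFaultException carrying the fault names.
        return VimFaultException(error["faults"], error["message"])

    def wait_for_task(poll, interval=0.5):
        # Poll task_info until a terminal state, as _poll_task does above.
        while True:
            info = poll()
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise translate_fault(info["error"])
            time.sleep(interval)

    # Example: a task that fails the way CopyVirtualDisk_Task does here.
    failed = {"state": "error",
              "error": {"faults": ["InvalidArgument"],
                        "message": "A specified parameter was not correct: fileType"}}
    try:
        wait_for_task(lambda: failed)
    except VimFaultException as exc:
        print(exc, exc.fault_list)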
[ 1919.760766] env[61962]: INFO nova.scheduler.client.report [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleted allocations for instance 1035139f-2193-4d1c-a220-369ef6096ae0 [ 1919.786074] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b03c4c30-9fd6-4923-9e36-c3edd6c4e2a5 tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "1035139f-2193-4d1c-a220-369ef6096ae0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 634.083s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.786351] env[61962]: DEBUG oslo_concurrency.lockutils [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "1035139f-2193-4d1c-a220-369ef6096ae0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 437.768s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.786579] env[61962]: DEBUG oslo_concurrency.lockutils [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "1035139f-2193-4d1c-a220-369ef6096ae0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.786841] env[61962]: DEBUG oslo_concurrency.lockutils [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "1035139f-2193-4d1c-a220-369ef6096ae0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.787027] env[61962]: DEBUG oslo_concurrency.lockutils [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "1035139f-2193-4d1c-a220-369ef6096ae0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.789280] env[61962]: INFO nova.compute.manager [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Terminating instance [ 1919.791437] env[61962]: DEBUG nova.compute.manager [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Start destroying the instance on the hypervisor.
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1919.791734] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1919.791895] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7295d975-d8a6-4b0a-aa00-ed6b7eecfb96 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.802373] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7d23bf-7d8a-46e5-b565-43598c129b1e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.833392] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1035139f-2193-4d1c-a220-369ef6096ae0 could not be found. [ 1919.833596] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1919.833780] env[61962]: INFO nova.compute.manager [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1919.834041] env[61962]: DEBUG oslo.service.loopingcall [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1919.834295] env[61962]: DEBUG nova.compute.manager [-] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1919.834387] env[61962]: DEBUG nova.network.neutron [-] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1919.860582] env[61962]: DEBUG nova.network.neutron [-] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.869701] env[61962]: INFO nova.compute.manager [-] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] Took 0.04 seconds to deallocate network for instance.
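Note how the terminate path stays idempotent: the VM for 1035139f-2193-4d1c-a220-369ef6096ae0 was never created on the backend (its build failed), so vmops downgrades InstanceNotFound to a warning, reports the instance destroyed anyway, and lets the manager continue with network deallocation. A minimal sketch of that shape, with hypothetical names and an in-memory stand-in for the backend:

    backend_vms = {}  # the build failed, so the backend never got a VM

    class InstanceNotFound(Exception):
        pass

    def _lookup_vm(uuid):
        try:
            return backend_vms[uuid]
        except KeyError:
            raise InstanceNotFound(uuid)

    def destroy(uuid):
        try:
            vm_ref = _lookup_vm(uuid)
        except InstanceNotFound:
            # Mirrors the WARNING above: a missing backend VM is not fatal;
            # the destroy is simply treated as already complete.
            print(f"Instance does not exist on backend: {uuid}")
            return
        # ... UnregisterVM and DeleteDatastoreFile_Task would go here ...
        del backend_vms[uuid]

    destroy("1035139f-2193-4d1c-a220-369ef6096ae0")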
[ 1919.961207] env[61962]: DEBUG oslo_concurrency.lockutils [None req-99153f52-9f3c-48b2-8848-6a1d1761c32a tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "1035139f-2193-4d1c-a220-369ef6096ae0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.175s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.962112] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "1035139f-2193-4d1c-a220-369ef6096ae0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 182.444s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.962308] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1035139f-2193-4d1c-a220-369ef6096ae0] During sync_power_state the instance has a pending task (deleting). Skip. [ 1919.962482] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "1035139f-2193-4d1c-a220-369ef6096ae0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.158702] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1951.171893] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.172198] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.172380] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.172560] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1951.173705] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c5eac3-acb0-43e8-b6ca-ed3f7268c695 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.183013] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-83ce201d-a7e6-4fa1-9eca-cc7daec100d7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.198713] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d996c00-38dc-4368-b418-2ef329fa4a5f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.205948] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bef280c-5f8b-46e9-8f93-818316ebafb1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.235783] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180572MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1951.235979] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.236145] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.310970] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 53048279-c3ea-484f-9a3e-049b12892f7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1951.311147] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1951.311277] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1951.311399] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1951.311517] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 02c198ea-61de-4e70-866c-39781d353d04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1951.311632] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85003695-cb3e-4bce-9a3a-a1d799b36369 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1951.311745] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1a74c820-67e9-424c-b4e6-333db3449251 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1951.311934] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1951.312104] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '81', 'num_instances': '7', 'num_vm_building': '7', 'num_task_deleting': '6', 'num_os_type_None': '7', 'num_proj_88a7b37d54f047149b05fc80e0b51601': '1', 'io_workload': '7', 'num_proj_40bdd99e47aa40a28edd38c6b4a09a5f': '1', 'num_proj_56bb35c701074f02820d93ad5911d089': '2', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '2', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_task_spawning': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1951.410857] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e67f55-135b-40cf-9c0d-8142820d098f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.419407] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c11747-74c7-4eae-9be6-9a0b543566d1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.451517] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4bbdb3-39ec-4af6-babe-f4cb41c37dd0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.459618] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06670ac-7d50-41f3-9ba1-5eac4dff8555 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.473429] env[61962]: DEBUG 
nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1951.481980] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1951.497652] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1951.497829] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.262s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.498744] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1954.499155] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1961.153828] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1963.158487] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1963.158899] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1964.159077] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.160202] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.160463] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1965.160536] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1965.180394] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1965.180605] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1965.180702] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1965.180824] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1965.180951] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1965.181087] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1965.181242] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1965.181425] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1967.175905] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1967.814017] env[61962]: WARNING oslo_vmware.rw_handles [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1967.814017] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1967.814017] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1967.814017] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1967.814017] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1967.814017] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 1967.814017] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1967.814017] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1967.814017] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1967.814017] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1967.814017] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1967.814017] env[61962]: ERROR oslo_vmware.rw_handles [ 1967.814584] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/8a935267-43f5-469e-b99f-2c374b34ba6b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1967.816476] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 
tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1967.816720] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Copying Virtual Disk [datastore2] vmware_temp/8a935267-43f5-469e-b99f-2c374b34ba6b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/8a935267-43f5-469e-b99f-2c374b34ba6b/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1967.817013] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e6609d4-6b26-4daf-92a1-dc0e5db6ca42 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.826181] env[61962]: DEBUG oslo_vmware.api [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Waiting for the task: (returnval){ [ 1967.826181] env[61962]: value = "task-4892090" [ 1967.826181] env[61962]: _type = "Task" [ 1967.826181] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.835692] env[61962]: DEBUG oslo_vmware.api [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Task: {'id': task-4892090, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.158710] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1968.337057] env[61962]: DEBUG oslo_vmware.exceptions [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1968.337473] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1968.337852] env[61962]: ERROR nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1968.337852] env[61962]: Faults: ['InvalidArgument'] [ 1968.337852] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Traceback (most recent call last): [ 1968.337852] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1968.337852] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] yield resources [ 1968.337852] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1968.337852] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] self.driver.spawn(context, instance, image_meta, [ 1968.337852] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1968.337852] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1968.337852] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1968.337852] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] self._fetch_image_if_missing(context, vi) [ 1968.337852] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] image_cache(vi, tmp_image_ds_loc) [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] vm_util.copy_virtual_disk( [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] session._wait_for_task(vmdk_copy_task) [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] 
File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] return self.wait_for_task(task_ref) [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] return evt.wait() [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] result = hub.switch() [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1968.338275] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] return self.greenlet.switch() [ 1968.338668] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1968.338668] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] self.f(*self.args, **self.kw) [ 1968.338668] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1968.338668] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] raise exceptions.translate_fault(task_info.error) [ 1968.338668] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1968.338668] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Faults: ['InvalidArgument'] [ 1968.338668] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] [ 1968.338668] env[61962]: INFO nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Terminating instance [ 1968.339874] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.340108] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1968.340371] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-369dac6b-36e7-4a6d-a9a4-1e3f1c516ea4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.342771] env[61962]: DEBUG nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1968.342968] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1968.343750] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e6adc5-4e59-428d-bf67-0c93a1069756 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.352213] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1968.353373] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b23437bf-bab8-4fc8-b90b-fd31d8dd5cd5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.354958] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1968.355155] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1968.355853] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec5b9a24-ab4c-4127-a128-04de3235185b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.361635] env[61962]: DEBUG oslo_vmware.api [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Waiting for the task: (returnval){ [ 1968.361635] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5282e460-8d27-3915-62a1-44eeb1c670eb" [ 1968.361635] env[61962]: _type = "Task" [ 1968.361635] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.370076] env[61962]: DEBUG oslo_vmware.api [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5282e460-8d27-3915-62a1-44eeb1c670eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.434571] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1968.434843] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1968.435113] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Deleting the datastore file [datastore2] 53048279-c3ea-484f-9a3e-049b12892f7a {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1968.435399] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f50c4ff0-1512-4e5b-aad7-f32cbe5a87d3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.442531] env[61962]: DEBUG oslo_vmware.api [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Waiting for the task: (returnval){ [ 1968.442531] env[61962]: value = "task-4892092" [ 1968.442531] env[61962]: _type = "Task" [ 1968.442531] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.452059] env[61962]: DEBUG oslo_vmware.api [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Task: {'id': task-4892092, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.871990] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1968.872267] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Creating directory with path [datastore2] vmware_temp/2319b4c9-861b-4fd9-ae92-914be600ed08/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1968.872529] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43249f8d-ff18-4b92-8734-ef5df20d14a3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.888904] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Created directory with path [datastore2] vmware_temp/2319b4c9-861b-4fd9-ae92-914be600ed08/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1968.889136] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Fetch image to [datastore2] vmware_temp/2319b4c9-861b-4fd9-ae92-914be600ed08/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1968.889318] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/2319b4c9-861b-4fd9-ae92-914be600ed08/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1968.890160] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3ffd58-8ebd-445c-a730-614064857c15 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.898227] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdc6775-6aec-4992-ad46-27c71fce3ebe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.908411] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cacd050-ae8f-49a8-8ef4-1ab871b65395 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.939989] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2025885f-f9c2-449e-aff3-391591640a4f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.948551] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f1e01120-b368-4a96-8760-bd06bab4f55a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.953203] env[61962]: DEBUG oslo_vmware.api [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Task: {'id': task-4892092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083516} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.953459] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1968.953638] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1968.953816] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1968.953980] env[61962]: INFO nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Took 0.61 seconds to destroy the instance on the hypervisor. 
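The interleaved records above also show the datastore layout used by the image cache: each fetch streams the Glance image into a per-request vmware_temp directory as tmp-sparse.vmdk, and the cache step then issues CopyVirtualDisk_Task to produce <image-id>.vmdk next to it (the result is subsequently placed under devstack-image-cache_base, whose lock appears earlier in this log). It is that copy which keeps failing with InvalidArgument: fileType. A small illustrative sketch of the path construction, reusing identifiers from the log:

    from pathlib import PurePosixPath

    def cache_paths(datastore, image_id, tmp_dir):
        # Mirrors the datastore paths visible above; purely illustrative.
        base = PurePosixPath("vmware_temp") / tmp_dir / image_id
        return {
            # upload target for the raw sparse stream
            "download_target": f"[{datastore}] {base / 'tmp-sparse.vmdk'}",
            # CopyVirtualDisk destination -- the step raising
            # "A specified parameter was not correct: fileType"
            "cache_copy": f"[{datastore}] {base / (image_id + '.vmdk')}",
        }

    paths = cache_paths("datastore2",
                        "f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe",
                        "2319b4c9-861b-4fd9-ae92-914be600ed08")
    for name, path in paths.items():
        print(name, "->", path)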
[ 1968.956143] env[61962]: DEBUG nova.compute.claims [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1968.956313] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.956531] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.972770] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1969.120936] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec3985b-f8fd-411d-a5b6-bc8b0e6ee78d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.126345] env[61962]: DEBUG oslo_vmware.rw_handles [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2319b4c9-861b-4fd9-ae92-914be600ed08/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1969.131315] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742f2450-2208-49ce-be74-9092ca716f5e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.185179] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1969.214205] env[61962]: DEBUG oslo_vmware.rw_handles [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Completed reading data from the image iterator. 
{{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1969.214381] env[61962]: DEBUG oslo_vmware.rw_handles [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2319b4c9-861b-4fd9-ae92-914be600ed08/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1969.215243] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000bb6dd-fe61-4b13-bbd9-d88957e4f298 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.223604] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c67e4c-ab6b-4ca9-9b7c-37635ce66c6a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.237704] env[61962]: DEBUG nova.compute.provider_tree [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1969.248743] env[61962]: DEBUG nova.scheduler.client.report [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1969.264079] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.307s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.264661] env[61962]: ERROR nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1969.264661] env[61962]: Faults: ['InvalidArgument'] [ 1969.264661] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Traceback (most recent call last): [ 1969.264661] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File 
"/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1969.264661] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] self.driver.spawn(context, instance, image_meta, [ 1969.264661] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1969.264661] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1969.264661] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1969.264661] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] self._fetch_image_if_missing(context, vi) [ 1969.264661] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1969.264661] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] image_cache(vi, tmp_image_ds_loc) [ 1969.264661] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] vm_util.copy_virtual_disk( [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] session._wait_for_task(vmdk_copy_task) [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] return self.wait_for_task(task_ref) [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] return evt.wait() [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] result = hub.switch() [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] return self.greenlet.switch() [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1969.265115] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] self.f(*self.args, **self.kw) [ 1969.265521] 
env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1969.265521] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] raise exceptions.translate_fault(task_info.error) [ 1969.265521] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1969.265521] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Faults: ['InvalidArgument'] [ 1969.265521] env[61962]: ERROR nova.compute.manager [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] [ 1969.265521] env[61962]: DEBUG nova.compute.utils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1969.266845] env[61962]: DEBUG nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Build of instance 53048279-c3ea-484f-9a3e-049b12892f7a was re-scheduled: A specified parameter was not correct: fileType [ 1969.266845] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1969.267273] env[61962]: DEBUG nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1969.267471] env[61962]: DEBUG nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1969.267653] env[61962]: DEBUG nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1969.267818] env[61962]: DEBUG nova.network.neutron [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1969.685380] env[61962]: DEBUG nova.network.neutron [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1969.697796] env[61962]: INFO nova.compute.manager [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Took 0.43 seconds to deallocate network for instance. [ 1969.799738] env[61962]: INFO nova.scheduler.client.report [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Deleted allocations for instance 53048279-c3ea-484f-9a3e-049b12892f7a [ 1969.825420] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e21c42e3-3594-408e-b517-f54e516b53cd tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Lock "53048279-c3ea-484f-9a3e-049b12892f7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 620.013s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.826200] env[61962]: DEBUG oslo_concurrency.lockutils [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Lock "53048279-c3ea-484f-9a3e-049b12892f7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 423.654s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.826392] env[61962]: DEBUG oslo_concurrency.lockutils [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Acquiring lock "53048279-c3ea-484f-9a3e-049b12892f7a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.826598] env[61962]: DEBUG oslo_concurrency.lockutils [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 
tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Lock "53048279-c3ea-484f-9a3e-049b12892f7a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.826763] env[61962]: DEBUG oslo_concurrency.lockutils [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Lock "53048279-c3ea-484f-9a3e-049b12892f7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.828871] env[61962]: INFO nova.compute.manager [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Terminating instance [ 1969.830955] env[61962]: DEBUG nova.compute.manager [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1969.831164] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1969.831744] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f0c54478-a77e-4d61-85b2-21820467672b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.840936] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10ce94d-1c0a-4113-ab26-22c9a59a5495 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.873132] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 53048279-c3ea-484f-9a3e-049b12892f7a could not be found. [ 1969.873385] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1969.873570] env[61962]: INFO nova.compute.manager [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Took 0.04 seconds to destroy the instance on the hypervisor. 
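The Acquiring / acquired / released triplets above, with their waited/held durations (e.g. "held 620.013s", "waited 423.654s"), are emitted by oslo.concurrency's lockutils wrapper, visible as the inner frames at lockutils.py:402/407/421. The terminate path takes the per-instance lock first, then briefly holds the "<uuid>-events" lock while clearing queued external events. A minimal sketch of that pattern, assuming plain oslo_concurrency usage; the function names and bodies are illustrative stand-ins, not Nova's actual code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance):
        # Runs only while "compute_resources" is held; lockutils logs the
        # Acquiring / acquired / released lines with waited/held times.
        pass

    def do_terminate_instance(instance_uuid):
        with lockutils.lock(instance_uuid):                  # per-instance lock
            with lockutils.lock(instance_uuid + '-events'):  # held only briefly
                pass  # clear pending external events for the instance

A long wait such as the 423.654s above simply means another request (here, the failed build) held the per-instance lock for that long before the terminate request could proceed.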
[ 1969.873816] env[61962]: DEBUG oslo.service.loopingcall [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1969.874060] env[61962]: DEBUG nova.compute.manager [-] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1969.874156] env[61962]: DEBUG nova.network.neutron [-] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1969.903687] env[61962]: DEBUG nova.network.neutron [-] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1969.911955] env[61962]: INFO nova.compute.manager [-] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] Took 0.04 seconds to deallocate network for instance. [ 1970.002422] env[61962]: DEBUG oslo_concurrency.lockutils [None req-1d807e42-57be-4e74-beca-e8e0fad65f6b tempest-ServersNegativeTestMultiTenantJSON-212472390 tempest-ServersNegativeTestMultiTenantJSON-212472390-project-member] Lock "53048279-c3ea-484f-9a3e-049b12892f7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.176s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1970.003296] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "53048279-c3ea-484f-9a3e-049b12892f7a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 232.485s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1970.003493] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 53048279-c3ea-484f-9a3e-049b12892f7a] During sync_power_state the instance has a pending task (deleting). Skip. 
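The "Running periodic task ComputeManager.<name>" entries (run_periodic_tasks at periodic_task.py:210) and the sync_power_state skip above come from oslo.service's periodic-task machinery: the manager declares decorated methods on a PeriodicTasks subclass, and a timer repeatedly invokes run_periodic_tasks. A hedged sketch using oslo.service's public decorator; the class name, spacing, and task body are illustrative:

    from oslo_service import periodic_task

    class ComputeManagerSketch(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _sync_power_states(self, context):
            # The real task reconciles driver power state with the DB record
            # and skips instances that already have a pending task (e.g.
            # 'deleting'), as logged above for
            # 53048279-c3ea-484f-9a3e-049b12892f7a.
            pass

Each tick, the service calls run_periodic_tasks(context), producing one "Running periodic task" line per due task.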
[ 1970.003670] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "53048279-c3ea-484f-9a3e-049b12892f7a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.159381] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2012.171759] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.171983] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.172173] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.172337] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2012.173445] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267cdec2-0b56-4373-b951-3e71e51abe8c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.182568] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31aa51d-bf67-4b89-bb64-2162a94bd3ff {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.197220] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d259e0fd-52ff-4d9b-bc2b-162d64873ee7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.204402] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f546c5-22dc-4c3c-a3ae-ebb204266b7d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.236524] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180561MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2012.236684] env[61962]: DEBUG 
oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.236907] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.316675] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2012.316835] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2012.316989] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2012.317098] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 02c198ea-61de-4e70-866c-39781d353d04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2012.317219] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85003695-cb3e-4bce-9a3a-a1d799b36369 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2012.317339] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1a74c820-67e9-424c-b4e6-333db3449251 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2012.317577] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2012.317724] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=100GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] stats={'failed_builds': '82', 'num_instances': '6', 'num_vm_building': '6', 'num_task_deleting': '5', 'num_os_type_None': '6', 'num_proj_40bdd99e47aa40a28edd38c6b4a09a5f': '1', 'io_workload': '6', 'num_proj_56bb35c701074f02820d93ad5911d089': '2', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '2', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_task_spawning': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2012.403663] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b95e35-49a6-43dc-a679-7cab3cdf378d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.412094] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c76e4de-e943-437e-98e5-45f96450ca1a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.442118] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ad3e20-a724-4662-8724-2107dc468779 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.450550] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125f9d52-fe52-4f16-a306-7bb0ffe359f4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.464197] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2012.472938] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2012.487979] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2012.488182] env[61962]: DEBUG 
oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.251s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.488983] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2016.488983] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2017.227284] env[61962]: WARNING oslo_vmware.rw_handles [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2017.227284] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2017.227284] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2017.227284] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2017.227284] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2017.227284] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 2017.227284] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2017.227284] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2017.227284] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2017.227284] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2017.227284] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2017.227284] env[61962]: ERROR oslo_vmware.rw_handles [ 2017.227907] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/2319b4c9-861b-4fd9-ae92-914be600ed08/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2017.229809] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2017.230064] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/2319b4c9-861b-4fd9-ae92-914be600ed08/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/2319b4c9-861b-4fd9-ae92-914be600ed08/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2017.230344] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6494f0fa-c241-4ac3-abcf-dc9d883678bd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.240175] env[61962]: DEBUG oslo_vmware.api [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Waiting for the task: (returnval){ [ 2017.240175] env[61962]: value = "task-4892093" [ 2017.240175] env[61962]: _type = "Task" [ 2017.240175] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.248453] env[61962]: DEBUG oslo_vmware.api [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Task: {'id': task-4892093, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.750578] env[61962]: DEBUG oslo_vmware.exceptions [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2017.750972] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.751391] env[61962]: ERROR nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2017.751391] env[61962]: Faults: ['InvalidArgument'] [ 2017.751391] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Traceback (most recent call last): [ 2017.751391] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2017.751391] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] yield resources [ 2017.751391] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2017.751391] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] self.driver.spawn(context, instance, image_meta, [ 2017.751391] env[61962]: ERROR nova.compute.manager [instance: 
25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2017.751391] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2017.751391] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2017.751391] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] self._fetch_image_if_missing(context, vi) [ 2017.751391] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] image_cache(vi, tmp_image_ds_loc) [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] vm_util.copy_virtual_disk( [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] session._wait_for_task(vmdk_copy_task) [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] return self.wait_for_task(task_ref) [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] return evt.wait() [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] result = hub.switch() [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2017.751794] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] return self.greenlet.switch() [ 2017.752184] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2017.752184] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] self.f(*self.args, **self.kw) [ 2017.752184] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2017.752184] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] raise 
exceptions.translate_fault(task_info.error) [ 2017.752184] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2017.752184] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Faults: ['InvalidArgument'] [ 2017.752184] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] [ 2017.752184] env[61962]: INFO nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Terminating instance [ 2017.753299] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.753506] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2017.753751] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5d74c89-3a81-4d42-b3d2-d8bf53451bea {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.755923] env[61962]: DEBUG nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2017.756129] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2017.756844] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a4a0f8-317e-46e3-a161-448434c8204d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.763960] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2017.764188] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8812185-f671-4d59-80be-a5e82383702c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.766339] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2017.766509] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2017.767439] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-727072d4-57df-4542-a011-183d0895eeac {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.772215] env[61962]: DEBUG oslo_vmware.api [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for the task: (returnval){ [ 2017.772215] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52d7b7df-dd97-c297-30aa-1df04486f4aa" [ 2017.772215] env[61962]: _type = "Task" [ 2017.772215] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.779865] env[61962]: DEBUG oslo_vmware.api [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52d7b7df-dd97-c297-30aa-1df04486f4aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.839606] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2017.839786] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2017.839948] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Deleting the datastore file [datastore2] 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2017.840235] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aeffdc32-335a-4476-9922-a88f4b059fa0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.847743] env[61962]: DEBUG oslo_vmware.api [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Waiting for the task: (returnval){ [ 2017.847743] env[61962]: value = "task-4892095" [ 2017.847743] env[61962]: _type = "Task" [ 2017.847743] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.855645] env[61962]: DEBUG oslo_vmware.api [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Task: {'id': task-4892095, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.283884] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2018.284155] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Creating directory with path [datastore2] vmware_temp/3a395a09-faaa-48be-9107-145b1a21e81d/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2018.284410] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9b22e9e-43bb-4150-9706-f3f8ec33bb99 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.298505] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Created directory with path [datastore2] vmware_temp/3a395a09-faaa-48be-9107-145b1a21e81d/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2018.298750] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Fetch image to [datastore2] vmware_temp/3a395a09-faaa-48be-9107-145b1a21e81d/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2018.298871] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/3a395a09-faaa-48be-9107-145b1a21e81d/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2018.299694] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad958de2-6ed7-4f3b-81c1-f48000c9e2e4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.307941] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072f05b1-72c8-4b28-8736-30a36c209eed {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.318962] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7b38c4-52a7-4583-9d08-9f8aff8a23aa {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.354937] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7f33d08a-2dc3-47e7-a8a1-6fbe7a8264f7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.364484] env[61962]: DEBUG oslo_vmware.api [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Task: {'id': task-4892095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083469} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.366144] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2018.366344] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2018.366522] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2018.366745] env[61962]: INFO nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Took 0.61 seconds to destroy the instance on the hypervisor. 
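Both CopyVirtualDisk_Task and DeleteDatastoreFile_Task above return immediately with a Task handle (e.g. "task-4892095") that the client polls until the task reports success or error; on error, the task's fault is translated into an exception, which is how the InvalidArgument/"fileType" VimFaultException in the earlier tracebacks surfaces out of _poll_task. A simplified, illustrative poll loop under those assumptions; get_task_info is a hypothetical caller-supplied property read, and oslo.vmware's real wait_for_task drives the poll from a green-thread looping call rather than time.sleep:

    import time

    def wait_for_vim_task(get_task_info, task_ref, interval=0.5):
        # get_task_info(task_ref) is assumed (hypothetical) to return an
        # object with .state ('running'|'success'|'error'), .result, .error.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # Mirrors "raise exceptions.translate_fault(task_info.error)"
                # from the tracebacks above.
                raise RuntimeError(info.error)
            # Each pass corresponds to one "... progress is N%" DEBUG line.
            time.sleep(interval)

The successful delete above finished in 0.083469s, so the first re-poll already observed the terminal state.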
[ 2018.368845] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8524b1e7-76bf-43a9-b944-5f0727a50363 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.371275] env[61962]: DEBUG nova.compute.claims [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2018.371523] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.371830] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.399362] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2018.468201] env[61962]: DEBUG oslo_vmware.rw_handles [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3a395a09-faaa-48be-9107-145b1a21e81d/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2018.531316] env[61962]: DEBUG oslo_vmware.rw_handles [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2018.531562] env[61962]: DEBUG oslo_vmware.rw_handles [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3a395a09-faaa-48be-9107-145b1a21e81d/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2018.580865] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938d49bd-2f75-4b66-b2b8-ea2dde235ad4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.590946] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9dc1cf-71d4-4813-9643-b8fac0598839 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.630195] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930771ed-20d7-469b-8b4c-97a5c22d429a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.638864] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e9a671-ff08-4737-994e-05f8e9bef834 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.654021] env[61962]: DEBUG nova.compute.provider_tree [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2018.663540] env[61962]: DEBUG nova.scheduler.client.report [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2018.680159] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.308s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.680718] env[61962]: ERROR nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2018.680718] env[61962]: Faults: ['InvalidArgument'] [ 2018.680718] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Traceback (most recent call last): [ 2018.680718] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2018.680718] 
env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] self.driver.spawn(context, instance, image_meta, [ 2018.680718] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2018.680718] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2018.680718] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2018.680718] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] self._fetch_image_if_missing(context, vi) [ 2018.680718] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2018.680718] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] image_cache(vi, tmp_image_ds_loc) [ 2018.680718] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] vm_util.copy_virtual_disk( [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] session._wait_for_task(vmdk_copy_task) [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] return self.wait_for_task(task_ref) [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] return evt.wait() [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] result = hub.switch() [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] return self.greenlet.switch() [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2018.681172] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] self.f(*self.args, **self.kw) [ 2018.681894] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2018.681894] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] raise exceptions.translate_fault(task_info.error) [ 2018.681894] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2018.681894] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Faults: ['InvalidArgument'] [ 2018.681894] env[61962]: ERROR nova.compute.manager [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] [ 2018.681894] env[61962]: DEBUG nova.compute.utils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2018.683057] env[61962]: DEBUG nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Build of instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d was re-scheduled: A specified parameter was not correct: fileType [ 2018.683057] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2018.683459] env[61962]: DEBUG nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2018.683627] env[61962]: DEBUG nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2018.683801] env[61962]: DEBUG nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2018.683999] env[61962]: DEBUG nova.network.neutron [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2019.319678] env[61962]: DEBUG nova.network.neutron [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.337182] env[61962]: INFO nova.compute.manager [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Took 0.65 seconds to deallocate network for instance. [ 2019.450974] env[61962]: INFO nova.scheduler.client.report [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Deleted allocations for instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d [ 2019.477236] env[61962]: DEBUG oslo_concurrency.lockutils [None req-14df904d-2edb-4fe8-be90-6d0f9cac5476 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 610.770s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.477643] env[61962]: DEBUG oslo_concurrency.lockutils [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 415.012s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.477985] env[61962]: DEBUG oslo_concurrency.lockutils [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Acquiring lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.478624] env[61962]: DEBUG oslo_concurrency.lockutils [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.478624] env[61962]: DEBUG oslo_concurrency.lockutils [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.480601] env[61962]: INFO nova.compute.manager [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Terminating instance [ 2019.482640] env[61962]: DEBUG nova.compute.manager [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2019.482834] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2019.483318] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95d5b0af-3677-4192-8ad9-be74aa74bffb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.492968] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6dd55a-a82c-45c6-94ae-2deda512f14a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.523492] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d could not be found. [ 2019.523711] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2019.523895] env[61962]: INFO nova.compute.manager [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 2019.524162] env[61962]: DEBUG oslo.service.loopingcall [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2019.524402] env[61962]: DEBUG nova.compute.manager [-] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2019.524497] env[61962]: DEBUG nova.network.neutron [-] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2019.556945] env[61962]: DEBUG nova.network.neutron [-] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.565956] env[61962]: INFO nova.compute.manager [-] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] Took 0.04 seconds to deallocate network for instance. [ 2019.660145] env[61962]: DEBUG oslo_concurrency.lockutils [None req-24d091a0-c9e0-49ad-b717-f7f80f216b58 tempest-AttachVolumeNegativeTest-1446100430 tempest-AttachVolumeNegativeTest-1446100430-project-member] Lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.183s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.660973] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 282.142s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.661175] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 25f6fa54-3d0d-4470-b5c9-06ba0ad2663d] During sync_power_state the instance has a pending task (deleting). Skip.
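The lock bookkeeping that brackets every episode in this log ("Acquiring lock ... by ...", "acquired ... waited Ns", ""released" ... held Ns") is emitted by oslo.concurrency's lockutils, referenced in each of those lines. A minimal runnable sketch of the same pattern, with the lock name taken from this log and a placeholder body (assumes oslo.concurrency is installed):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Runs with the named lock held; lockutils logs the
        # "acquired ... waited" / "released ... held" DEBUG lines seen
        # throughout this file around the decorated call.
        pass

    abort_instance_claim()

The waited/held durations in those DEBUG lines are the key signal here: the 610.770s hold on the instance lock above is the build-and-reschedule cycle keeping the racing terminate request queued.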
[ 2019.661347] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "25f6fa54-3d0d-4470-b5c9-06ba0ad2663d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.155072] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2023.158586] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2024.158631] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2026.159527] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2027.158917] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2027.159150] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2027.159232] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2027.174711] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2027.175068] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2027.175068] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2027.175205] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Skipping network cache update for instance because it is Building.
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2027.175259] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2027.175370] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2029.159602] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.166062] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.166443] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.166495] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.166632] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 2030.177151] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] There are 0 instances to clean {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 2031.159681] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2031.159681] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances with incomplete migration {{(pid=61962) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 2067.846891] env[61962]: WARNING oslo_vmware.rw_handles [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2067.846891] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2067.846891] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2067.846891] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 
2067.846891] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2067.846891] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 2067.846891] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2067.846891] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2067.846891] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2067.846891] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2067.846891] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2067.846891] env[61962]: ERROR oslo_vmware.rw_handles [ 2067.846891] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/3a395a09-faaa-48be-9107-145b1a21e81d/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2067.849020] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2067.849272] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Copying Virtual Disk [datastore2] vmware_temp/3a395a09-faaa-48be-9107-145b1a21e81d/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/3a395a09-faaa-48be-9107-145b1a21e81d/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2067.849574] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b74ea4fc-1d7d-48f6-990a-b7b31a2f72fa {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.858011] env[61962]: DEBUG oslo_vmware.api [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for the task: (returnval){ [ 2067.858011] env[61962]: value = "task-4892096" [ 2067.858011] env[61962]: _type = "Task" [ 2067.858011] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.866777] env[61962]: DEBUG oslo_vmware.api [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Task: {'id': task-4892096, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.369404] env[61962]: DEBUG oslo_vmware.exceptions [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2068.369755] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2068.370282] env[61962]: ERROR nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2068.370282] env[61962]: Faults: ['InvalidArgument'] [ 2068.370282] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Traceback (most recent call last): [ 2068.370282] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2068.370282] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] yield resources [ 2068.370282] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2068.370282] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] self.driver.spawn(context, instance, image_meta, [ 2068.370282] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2068.370282] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2068.370282] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2068.370282] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] self._fetch_image_if_missing(context, vi) [ 2068.370282] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] image_cache(vi, tmp_image_ds_loc) [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] vm_util.copy_virtual_disk( [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] session._wait_for_task(vmdk_copy_task) [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] return self.wait_for_task(task_ref) [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] return evt.wait() [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] result = hub.switch() [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2068.371147] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] return self.greenlet.switch() [ 2068.371577] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2068.371577] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] self.f(*self.args, **self.kw) [ 2068.371577] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2068.371577] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] raise exceptions.translate_fault(task_info.error) [ 2068.371577] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2068.371577] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Faults: ['InvalidArgument'] [ 2068.371577] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] [ 2068.371577] env[61962]: INFO nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Terminating instance [ 2068.372241] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2068.372453] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 
tempest-ListServerFiltersTestJSON-924365092-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2068.372711] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-964f3abf-c599-43de-8aa3-afbea3bcb0ae {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.375300] env[61962]: DEBUG nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2068.375491] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2068.376309] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57395ced-91d7-4db7-bd9f-95f69c7f0dfe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.384322] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2068.384530] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abbe43af-31fc-4b94-b215-aa85d54999f6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.387079] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2068.387261] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2068.388311] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-995ce241-9c60-4d41-9bc2-710d35ccc80e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.394165] env[61962]: DEBUG oslo_vmware.api [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for the task: (returnval){ [ 2068.394165] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]527c286b-4f26-7f0e-6adc-6d7ca96376ee" [ 2068.394165] env[61962]: _type = "Task" [ 2068.394165] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.402294] env[61962]: DEBUG oslo_vmware.api [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]527c286b-4f26-7f0e-6adc-6d7ca96376ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.452720] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2068.453117] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2068.453322] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Deleting the datastore file [datastore2] 1e71cc99-f026-4197-b7f1-ae18f7f99af6 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2068.453601] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7af9028b-72a4-4840-abd5-11f581b58989 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.461081] env[61962]: DEBUG oslo_vmware.api [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for the task: (returnval){ [ 2068.461081] env[61962]: value = "task-4892098" [ 2068.461081] env[61962]: _type = "Task" [ 2068.461081] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.471105] env[61962]: DEBUG oslo_vmware.api [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Task: {'id': task-4892098, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.905384] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2068.905783] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Creating directory with path [datastore2] vmware_temp/780b6d01-daf2-4eee-8f9f-291782f14797/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2068.905890] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35e4e4ba-4cc6-43dc-81ef-3e94c7118128 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.918137] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Created directory with path [datastore2] vmware_temp/780b6d01-daf2-4eee-8f9f-291782f14797/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2068.918383] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Fetch image to [datastore2] vmware_temp/780b6d01-daf2-4eee-8f9f-291782f14797/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2068.918552] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/780b6d01-daf2-4eee-8f9f-291782f14797/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2068.919376] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb674b7-3e8c-4618-b816-81933202361f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.926677] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a131e9f1-aab8-48d4-9eee-ea03b33b9c0d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.936775] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3a312b-d12b-4862-86e8-ec0dce29e630 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.972710] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-483ddaf4-9204-41b3-91bd-e4a08eb485a3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.981997] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-84c49162-01ee-47a1-a69f-4a8c6d9009ca {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.983835] env[61962]: DEBUG oslo_vmware.api [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Task: {'id': task-4892098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082768} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.984107] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2068.984298] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2068.984474] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2068.984651] env[61962]: INFO nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Took 0.61 seconds to destroy the instance on the hypervisor. 
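This second episode shows the failure mode end to end: the image upload for instance 1e71cc99 ended with RemoteDisconnected while the write handle was being closed, which plausibly left a truncated tmp-sparse.vmdk behind, and it is the subsequent CopyVirtualDisk of that file that vCenter rejected with InvalidArgument('fileType'). The write handle itself is plain HTTP against the ESX host's /folder endpoint; the full URL, with its dcPath and dsName query parameters, is logged for the next fetch just below. An illustrative sketch using requests in place of oslo_vmware's FileWriteHandle (host, path and cookie are placeholders, not values this code derives itself):

    import requests

    def upload_vmdk(image_stream, host, ds_path, cookie):
        # Stream image data to a datastore file, e.g.
        # https://<esx-host>:443/folder/vmware_temp/.../tmp-sparse.vmdk
        url = "https://%s:443/folder/%s" % (host, ds_path)
        params = {"dcPath": "ha-datacenter", "dsName": "datastore2"}
        # A connection dropped mid-transfer or at close is what produces
        # the RemoteDisconnected warning above, and a truncated vmdk.
        resp = requests.put(url, params=params, data=image_stream,
                            headers={"Cookie": cookie}, verify=False)
        resp.raise_for_status()

Note that Nova logs "Downloaded image file data ..." even after the RemoteDisconnected warning, so the truncated file is treated as a successful fetch and only fails later, at copy time.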
[ 2068.986845] env[61962]: DEBUG nova.compute.claims [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2068.987033] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.987254] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.007499] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2069.123730] env[61962]: DEBUG nova.scheduler.client.report [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Refreshing inventories for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2069.140039] env[61962]: DEBUG nova.scheduler.client.report [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Updating ProviderTree inventory for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2069.140202] env[61962]: DEBUG nova.compute.provider_tree [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Updating inventory in ProviderTree for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
2069.152312] env[61962]: DEBUG nova.scheduler.client.report [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Refreshing aggregate associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, aggregates: None {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2069.162733] env[61962]: DEBUG oslo_vmware.rw_handles [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/780b6d01-daf2-4eee-8f9f-291782f14797/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2069.217555] env[61962]: DEBUG nova.scheduler.client.report [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Refreshing trait associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2069.222289] env[61962]: DEBUG oslo_vmware.rw_handles [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2069.222474] env[61962]: DEBUG oslo_vmware.rw_handles [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/780b6d01-daf2-4eee-8f9f-291782f14797/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2069.301532] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8925fe6c-c56e-4893-b5b3-473de37387e5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.310556] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f370a212-6996-4293-8839-82a444321d7b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.343365] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e93251-3867-41fa-9456-3b7d6185dc77 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.352036] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b737135-4fa5-4f60-8ad5-de1ebfdf04ec {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.366837] env[61962]: DEBUG nova.compute.provider_tree [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2069.375680] env[61962]: DEBUG nova.scheduler.client.report [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2069.390951] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.404s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.391516] env[61962]: ERROR nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2069.391516] env[61962]: Faults: ['InvalidArgument'] [ 2069.391516] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Traceback (most recent call last): [ 2069.391516] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2069.391516] 
env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] self.driver.spawn(context, instance, image_meta, [ 2069.391516] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2069.391516] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2069.391516] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2069.391516] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] self._fetch_image_if_missing(context, vi) [ 2069.391516] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2069.391516] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] image_cache(vi, tmp_image_ds_loc) [ 2069.391516] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] vm_util.copy_virtual_disk( [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] session._wait_for_task(vmdk_copy_task) [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] return self.wait_for_task(task_ref) [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] return evt.wait() [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] result = hub.switch() [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] return self.greenlet.switch() [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2069.392250] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] self.f(*self.args, **self.kw) [ 2069.393019] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2069.393019] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] raise exceptions.translate_fault(task_info.error) [ 2069.393019] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2069.393019] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Faults: ['InvalidArgument'] [ 2069.393019] env[61962]: ERROR nova.compute.manager [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] [ 2069.393019] env[61962]: DEBUG nova.compute.utils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2069.393710] env[61962]: DEBUG nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Build of instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 was re-scheduled: A specified parameter was not correct: fileType [ 2069.393710] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2069.394093] env[61962]: DEBUG nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2069.394271] env[61962]: DEBUG nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2069.394442] env[61962]: DEBUG nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2069.394603] env[61962]: DEBUG nova.network.neutron [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2069.714198] env[61962]: DEBUG nova.network.neutron [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2069.728352] env[61962]: INFO nova.compute.manager [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Took 0.33 seconds to deallocate network for instance. [ 2069.861369] env[61962]: INFO nova.scheduler.client.report [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Deleted allocations for instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 [ 2069.895797] env[61962]: DEBUG oslo_concurrency.lockutils [None req-8efd776f-39ec-4e55-8eeb-a7bd10a3e451 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 616.124s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.896176] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 332.377s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.896176] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] During sync_power_state the instance has a pending task (spawning). Skip.
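For reference, the inventory dict that keeps appearing in these Placement exchanges translates into schedulable capacity via capacity = (total - reserved) * allocation_ratio, with max_unit additionally capping any single allocation (16 VCPUs, 65530 MB of RAM, 96 GB of disk here). A quick check of the figures reported for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1:

    # Effective capacities implied by the inventory data above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0

The repeated "Inventory has not changed" lines are the cheap path: the resource tracker compares this dict against the cached ProviderTree and skips the PUT to Placement when nothing differs.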
[ 2069.896331] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.896829] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 222.663s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.897073] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.897284] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.897451] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.899721] env[61962]: INFO nova.compute.manager [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Terminating instance [ 2069.902347] env[61962]: DEBUG nova.compute.manager [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2069.902544] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2069.902818] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bbcbe768-b4f5-49fc-916d-760b391a0d25 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.912786] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b90699-040c-4d68-9b1f-1b50354da0a8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.941777] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1e71cc99-f026-4197-b7f1-ae18f7f99af6 could not be found. [ 2069.941991] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2069.942184] env[61962]: INFO nova.compute.manager [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2069.942435] env[61962]: DEBUG oslo.service.loopingcall [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2069.942663] env[61962]: DEBUG nova.compute.manager [-] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2069.942760] env[61962]: DEBUG nova.network.neutron [-] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2069.970776] env[61962]: DEBUG nova.network.neutron [-] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2069.984060] env[61962]: INFO nova.compute.manager [-] [instance: 1e71cc99-f026-4197-b7f1-ae18f7f99af6] Took 0.04 seconds to deallocate network for instance. 
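Two details in the destroy path above are worth noting: InstanceNotFound from the backend is tolerated (logged as a WARNING, then the destroy continues as if successful), and network deallocation is wrapped in a retry loop driven by oslo.service's loopingcall, which produces the "Waiting for function ... _deallocate_network_with_retries to return" line. A minimal sketch of that retry wrapper, using the real oslo_service.loopingcall.RetryDecorator API; the retry parameters and the retried function are illustrative, not Nova's actual values:

    from oslo_service import loopingcall

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=10,
                                exceptions=(ConnectionError,))
    def deallocate_network_with_retries():
        # Re-invoked with increasing sleep between attempts whenever one
        # of the listed exceptions is raised, until it succeeds or
        # max_retry_count is exhausted; the decorator's internal looping
        # call logs the "Waiting for function ... to return" DEBUG line.
        pass

    deallocate_network_with_retries()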
[ 2070.135201] env[61962]: DEBUG oslo_concurrency.lockutils [None req-0592c39d-dc1b-4980-a48c-92d3fed7e33c tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "1e71cc99-f026-4197-b7f1-ae18f7f99af6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.238s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.168335] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2072.182939] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.183188] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.183387] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.183559] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2072.184651] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06fdc0e-dfa8-436d-9910-7dddf0c6033a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.193875] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e853bb2-d76b-4042-a8a9-ae4e6c11fa13 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.210262] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e1b2a8-695e-4418-851d-7e12c99b9d8d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.217926] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a92955-2ed1-4c89-b9a5-e1067f37ebf3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.248760] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180563MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2072.248922] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.249196] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.322644] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2072.322842] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 02c198ea-61de-4e70-866c-39781d353d04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2072.323013] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85003695-cb3e-4bce-9a3a-a1d799b36369 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2072.323188] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1a74c820-67e9-424c-b4e6-333db3449251 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2072.324077] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2072.324077] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=100GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] stats={'failed_builds': '84', 'num_instances': '4', 'num_vm_building': '4', 'num_task_deleting': '3', 'num_os_type_None': '4', 'num_proj_56bb35c701074f02820d93ad5911d089': '1', 'io_workload': '4', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '2', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_task_spawning': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2072.409877] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fee5051-cb4c-4a07-b2e9-21e2d8e5edcf {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.418897] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff6e608-a667-4ff4-89de-19c8282494d1 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.451234] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699da5a9-1e24-48a8-97eb-3fd01800ac08 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.459281] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e81dfe0-ba4e-4d8f-af59-f8ed0b4e26e7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.473462] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2072.483657] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2072.503442] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2072.503442] env[61962]: DEBUG oslo_concurrency.lockutils [None 
req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.254s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.493539] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.493921] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2081.154570] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2084.159144] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2084.159603] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.154786] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.173035] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.173244] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2088.173244] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2088.190042] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2088.190042] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Skipping network cache update for instance because it is Building. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2088.190042] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2088.190042] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2088.190042] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2088.190433] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2090.158625] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2090.158952] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2094.701815] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "1a74c820-67e9-424c-b4e6-333db3449251" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.194756] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Acquiring lock "2a520bab-558e-49e8-9816-c1acc00663f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.194996] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Lock "2a520bab-558e-49e8-9816-c1acc00663f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2095.206628] env[61962]: DEBUG nova.compute.manager [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2095.265044] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.265307] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2095.266980] env[61962]: INFO nova.compute.claims [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2095.393440] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44077642-f560-44f8-a84c-0f0dc3abee74 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.401785] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a9aa7b-4f32-4886-8c37-aa2c09bf29da {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.430983] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2776e7cf-1c9f-42fe-9e73-29b58026b1ba {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.438350] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da43d1ed-b482-4e18-b817-7ea87a335269 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.451660] env[61962]: DEBUG nova.compute.provider_tree [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2095.461866] env[61962]: DEBUG nova.scheduler.client.report [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2095.476509] env[61962]: DEBUG 
oslo_concurrency.lockutils [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.211s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2095.476867] env[61962]: DEBUG nova.compute.manager [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2095.509866] env[61962]: DEBUG nova.compute.utils [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2095.511134] env[61962]: DEBUG nova.compute.manager [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2095.511313] env[61962]: DEBUG nova.network.neutron [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2095.521014] env[61962]: DEBUG nova.compute.manager [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Start building block device mappings for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2095.557583] env[61962]: INFO nova.virt.block_device [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Booting with volume 283acf53-fbac-4900-a61f-a572b75ac300 at /dev/sda [ 2095.578908] env[61962]: DEBUG nova.policy [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d0ecdabc3554d19a354833e30f37955', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bab68afcfc3e4c6f852bb69600c0f69a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 2095.606045] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb2b03f4-44df-4d3f-8654-836a7ca5266d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.615064] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45eb980a-baba-4230-bfb2-635c3d59c056 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.644572] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6946ef5-5d45-4b55-b7ae-3f676b72e13c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.652334] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10bda4d-4b79-4e41-97c5-aad77d3b8437 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.680923] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911f483a-4bb0-480c-8a37-6dd04848f255 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.687818] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f61e7e-df91-44a9-92be-78d411434e0d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.701766] env[61962]: DEBUG nova.virt.block_device [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Updating existing volume attachment record: c9f771b9-20fb-44a4-ab30-c7212b44e6ae {{(pid=61962) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 2095.984846] env[61962]: DEBUG nova.compute.manager [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2095.985138] env[61962]: DEBUG nova.virt.hardware [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2095.985391] env[61962]: DEBUG nova.virt.hardware [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2095.985546] env[61962]: DEBUG nova.virt.hardware [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2095.985725] env[61962]: DEBUG nova.virt.hardware [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2095.986406] env[61962]: DEBUG nova.virt.hardware [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2095.986406] env[61962]: DEBUG nova.virt.hardware [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2095.986406] env[61962]: DEBUG nova.virt.hardware [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2095.986406] env[61962]: DEBUG nova.virt.hardware [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2095.987702] env[61962]: DEBUG nova.virt.hardware [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 
tempest-ServersTestBootFromVolume-1925232432-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2095.987702] env[61962]: DEBUG nova.virt.hardware [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2095.987702] env[61962]: DEBUG nova.virt.hardware [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2095.988219] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c7dc5e-2934-4507-b120-df45f35114f0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.999109] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52bc37ea-5394-4c5a-bf2b-f474bebb2b6a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.004803] env[61962]: DEBUG nova.network.neutron [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Successfully created port: b391cad7-579a-4129-a648-832d83c15c16 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2096.591271] env[61962]: DEBUG nova.compute.manager [req-7c9409a3-d2c4-41ad-8142-7b47310262fb req-ff11d40c-0876-4e9e-91c9-b532f701fb85 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Received event network-vif-plugged-b391cad7-579a-4129-a648-832d83c15c16 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2096.591491] env[61962]: DEBUG oslo_concurrency.lockutils [req-7c9409a3-d2c4-41ad-8142-7b47310262fb req-ff11d40c-0876-4e9e-91c9-b532f701fb85 service nova] Acquiring lock "2a520bab-558e-49e8-9816-c1acc00663f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2096.591697] env[61962]: DEBUG oslo_concurrency.lockutils [req-7c9409a3-d2c4-41ad-8142-7b47310262fb req-ff11d40c-0876-4e9e-91c9-b532f701fb85 service nova] Lock "2a520bab-558e-49e8-9816-c1acc00663f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2096.591869] env[61962]: DEBUG oslo_concurrency.lockutils [req-7c9409a3-d2c4-41ad-8142-7b47310262fb req-ff11d40c-0876-4e9e-91c9-b532f701fb85 service nova] Lock "2a520bab-558e-49e8-9816-c1acc00663f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.592113] env[61962]: DEBUG nova.compute.manager [req-7c9409a3-d2c4-41ad-8142-7b47310262fb 
req-ff11d40c-0876-4e9e-91c9-b532f701fb85 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] No waiting events found dispatching network-vif-plugged-b391cad7-579a-4129-a648-832d83c15c16 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2096.592335] env[61962]: WARNING nova.compute.manager [req-7c9409a3-d2c4-41ad-8142-7b47310262fb req-ff11d40c-0876-4e9e-91c9-b532f701fb85 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Received unexpected event network-vif-plugged-b391cad7-579a-4129-a648-832d83c15c16 for instance with vm_state building and task_state spawning. [ 2096.671810] env[61962]: DEBUG nova.network.neutron [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Successfully updated port: b391cad7-579a-4129-a648-832d83c15c16 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2096.684353] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Acquiring lock "refresh_cache-2a520bab-558e-49e8-9816-c1acc00663f6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2096.684484] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Acquired lock "refresh_cache-2a520bab-558e-49e8-9816-c1acc00663f6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2096.684625] env[61962]: DEBUG nova.network.neutron [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2096.728634] env[61962]: DEBUG nova.network.neutron [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2096.908296] env[61962]: DEBUG nova.network.neutron [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Updating instance_info_cache with network_info: [{"id": "b391cad7-579a-4129-a648-832d83c15c16", "address": "fa:16:3e:a5:6f:4c", "network": {"id": "23f760ff-3b92-41b1-bcdb-59357222b817", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-571426812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab68afcfc3e4c6f852bb69600c0f69a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb391cad7-57", "ovs_interfaceid": "b391cad7-579a-4129-a648-832d83c15c16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2096.923344] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Releasing lock "refresh_cache-2a520bab-558e-49e8-9816-c1acc00663f6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2096.923608] env[61962]: DEBUG nova.compute.manager [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Instance network_info: |[{"id": "b391cad7-579a-4129-a648-832d83c15c16", "address": "fa:16:3e:a5:6f:4c", "network": {"id": "23f760ff-3b92-41b1-bcdb-59357222b817", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-571426812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab68afcfc3e4c6f852bb69600c0f69a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb391cad7-57", "ovs_interfaceid": "b391cad7-579a-4129-a648-832d83c15c16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2096.924029] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:6f:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b391cad7-579a-4129-a648-832d83c15c16', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2096.931689] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Creating folder: Project (bab68afcfc3e4c6f852bb69600c0f69a). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2096.932296] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53551981-5fbd-4c28-a3a4-b2243e1b447f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.945840] env[61962]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 2096.946015] env[61962]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61962) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 2096.946353] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Folder already exists: Project (bab68afcfc3e4c6f852bb69600c0f69a). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2096.946568] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Creating folder: Instances. Parent ref: group-v953426. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2096.946822] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08e45ae3-5f6a-4e64-9bab-74b5e882e192 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.956175] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Created folder: Instances in parent group-v953426. [ 2096.956408] env[61962]: DEBUG oslo.service.loopingcall [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2096.956597] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2096.956791] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8ca132b-c384-4abf-ab0d-d16c746d3fad {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.975813] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2096.975813] env[61962]: value = "task-4892111" [ 2096.975813] env[61962]: _type = "Task" [ 2096.975813] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.983644] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892111, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.485859] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892111, 'name': CreateVM_Task, 'duration_secs': 0.312592} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.486230] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2097.486722] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-953429', 'volume_id': '283acf53-fbac-4900-a61f-a572b75ac300', 'name': 'volume-283acf53-fbac-4900-a61f-a572b75ac300', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2a520bab-558e-49e8-9816-c1acc00663f6', 'attached_at': '', 'detached_at': '', 'volume_id': '283acf53-fbac-4900-a61f-a572b75ac300', 'serial': '283acf53-fbac-4900-a61f-a572b75ac300'}, 'disk_bus': None, 'mount_device': '/dev/sda', 'attachment_id': 'c9f771b9-20fb-44a4-ab30-c7212b44e6ae', 'device_type': None, 'delete_on_termination': True, 'boot_index': 0, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=61962) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2097.486998] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Root volume attach. 
Driver type: vmdk {{(pid=61962) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2097.487786] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5b792a-15ad-464f-ba14-bee78691d838 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.495822] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8776b962-0f3d-45db-a422-16c6ce42ad44 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.501475] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ed754b-465e-4bfc-81e0-3acebafc9d66 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.507268] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-540b2deb-1079-448d-aab8-b7b6156af33b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.514434] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Waiting for the task: (returnval){ [ 2097.514434] env[61962]: value = "task-4892112" [ 2097.514434] env[61962]: _type = "Task" [ 2097.514434] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.522281] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892112, 'name': RelocateVM_Task} progress is 5%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.024630] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892112, 'name': RelocateVM_Task, 'duration_secs': 0.027152} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.024985] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Volume attach. 
Driver type: vmdk {{(pid=61962) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2098.025243] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-953429', 'volume_id': '283acf53-fbac-4900-a61f-a572b75ac300', 'name': 'volume-283acf53-fbac-4900-a61f-a572b75ac300', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2a520bab-558e-49e8-9816-c1acc00663f6', 'attached_at': '', 'detached_at': '', 'volume_id': '283acf53-fbac-4900-a61f-a572b75ac300', 'serial': '283acf53-fbac-4900-a61f-a572b75ac300'} {{(pid=61962) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2098.026071] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e63a04e-5f05-4ca3-a56e-31034541ac89 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.044009] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b677c382-11a8-4f7d-8f49-85330021ac84 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.066164] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] volume-283acf53-fbac-4900-a61f-a572b75ac300/volume-283acf53-fbac-4900-a61f-a572b75ac300.vmdk or device None with type thin {{(pid=61962) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2098.066450] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-694ef5e8-ef44-460f-a6b5-fae10799c7ce {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.086497] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Waiting for the task: (returnval){ [ 2098.086497] env[61962]: value = "task-4892113" [ 2098.086497] env[61962]: _type = "Task" [ 2098.086497] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.094882] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892113, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.597193] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892113, 'name': ReconfigVM_Task, 'duration_secs': 0.251318} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.597564] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Reconfigured VM instance instance-00000059 to attach disk [datastore2] volume-283acf53-fbac-4900-a61f-a572b75ac300/volume-283acf53-fbac-4900-a61f-a572b75ac300.vmdk or device None with type thin {{(pid=61962) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2098.602351] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5fa2520-9fff-431c-bba2-5cc7c59308ab {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.617547] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Waiting for the task: (returnval){ [ 2098.617547] env[61962]: value = "task-4892114" [ 2098.617547] env[61962]: _type = "Task" [ 2098.617547] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.619928] env[61962]: DEBUG nova.compute.manager [req-ccff272b-b796-4442-ad10-1dd25e8f6c89 req-4f185102-07c4-4036-9a11-d149c18d47b5 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Received event network-changed-b391cad7-579a-4129-a648-832d83c15c16 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2098.620128] env[61962]: DEBUG nova.compute.manager [req-ccff272b-b796-4442-ad10-1dd25e8f6c89 req-4f185102-07c4-4036-9a11-d149c18d47b5 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Refreshing instance network info cache due to event network-changed-b391cad7-579a-4129-a648-832d83c15c16. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2098.620336] env[61962]: DEBUG oslo_concurrency.lockutils [req-ccff272b-b796-4442-ad10-1dd25e8f6c89 req-4f185102-07c4-4036-9a11-d149c18d47b5 service nova] Acquiring lock "refresh_cache-2a520bab-558e-49e8-9816-c1acc00663f6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2098.620477] env[61962]: DEBUG oslo_concurrency.lockutils [req-ccff272b-b796-4442-ad10-1dd25e8f6c89 req-4f185102-07c4-4036-9a11-d149c18d47b5 service nova] Acquired lock "refresh_cache-2a520bab-558e-49e8-9816-c1acc00663f6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2098.620633] env[61962]: DEBUG nova.network.neutron [req-ccff272b-b796-4442-ad10-1dd25e8f6c89 req-4f185102-07c4-4036-9a11-d149c18d47b5 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Refreshing network info cache for port b391cad7-579a-4129-a648-832d83c15c16 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2098.631156] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892114, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.898233] env[61962]: DEBUG nova.network.neutron [req-ccff272b-b796-4442-ad10-1dd25e8f6c89 req-4f185102-07c4-4036-9a11-d149c18d47b5 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Updated VIF entry in instance network info cache for port b391cad7-579a-4129-a648-832d83c15c16. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2098.898609] env[61962]: DEBUG nova.network.neutron [req-ccff272b-b796-4442-ad10-1dd25e8f6c89 req-4f185102-07c4-4036-9a11-d149c18d47b5 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Updating instance_info_cache with network_info: [{"id": "b391cad7-579a-4129-a648-832d83c15c16", "address": "fa:16:3e:a5:6f:4c", "network": {"id": "23f760ff-3b92-41b1-bcdb-59357222b817", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-571426812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab68afcfc3e4c6f852bb69600c0f69a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb391cad7-57", "ovs_interfaceid": "b391cad7-579a-4129-a648-832d83c15c16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2098.908677] env[61962]: DEBUG oslo_concurrency.lockutils [req-ccff272b-b796-4442-ad10-1dd25e8f6c89 req-4f185102-07c4-4036-9a11-d149c18d47b5 service nova] Releasing lock "refresh_cache-2a520bab-558e-49e8-9816-c1acc00663f6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2099.129939] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892114, 'name': ReconfigVM_Task, 'duration_secs': 0.117623} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.130261] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-953429', 'volume_id': '283acf53-fbac-4900-a61f-a572b75ac300', 'name': 'volume-283acf53-fbac-4900-a61f-a572b75ac300', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2a520bab-558e-49e8-9816-c1acc00663f6', 'attached_at': '', 'detached_at': '', 'volume_id': '283acf53-fbac-4900-a61f-a572b75ac300', 'serial': '283acf53-fbac-4900-a61f-a572b75ac300'} {{(pid=61962) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2099.130899] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18d8e0b9-d1c6-474d-9035-9a20c49b546b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.137631] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Waiting for the task: (returnval){ [ 2099.137631] env[61962]: value = "task-4892115" [ 2099.137631] env[61962]: _type = "Task" [ 2099.137631] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.146440] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892115, 'name': Rename_Task} progress is 5%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.648307] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892115, 'name': Rename_Task, 'duration_secs': 0.118848} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.648688] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Powering on the VM {{(pid=61962) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 2099.648872] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dea9358e-2480-4e6e-be47-fab413b3830f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.656484] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Waiting for the task: (returnval){ [ 2099.656484] env[61962]: value = "task-4892116" [ 2099.656484] env[61962]: _type = "Task" [ 2099.656484] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.665030] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892116, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.166548] env[61962]: DEBUG oslo_vmware.api [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892116, 'name': PowerOnVM_Task, 'duration_secs': 0.473691} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.166829] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Powered on the VM {{(pid=61962) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 2100.166999] env[61962]: INFO nova.compute.manager [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Took 4.18 seconds to spawn the instance on the hypervisor. [ 2100.167286] env[61962]: DEBUG nova.compute.manager [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Checking state {{(pid=61962) _get_power_state /opt/stack/nova/nova/compute/manager.py:1783}} [ 2100.168101] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e31a8b7-606b-4fa2-aa26-dea10772a638 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.234796] env[61962]: INFO nova.compute.manager [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Took 4.99 seconds to build instance. 
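
[Editor's note] The ~5-second boot-from-volume spawn above is five vCenter tasks chained through one helper: RelocateVM_Task (4892112), two ReconfigVM_Task calls for the volume attach (4892113/4892114), Rename_Task (4892115), and PowerOnVM_Task (4892116), each awaited by the session's wait_for_task(), which produces the "progress is N%" polling lines. A minimal sketch of that invoke-then-wait pattern with oslo.vmware — endpoint, credentials, and the retry/poll values are placeholders, not taken from this deployment:

    from oslo_vmware import api

    # Placeholder endpoint/credentials; Nova builds its session from the
    # [vmware] options in nova.conf. Positional args: host, username,
    # password, api retry count, task poll interval (seconds).
    session = api.VMwareAPISession(
        'vc1.example.com', 'user', 'secret', 10, 0.5)

    def power_on(vm_ref):
        # *_Task SOAP calls return a task moref immediately; wait_for_task()
        # polls it (the "progress is N%" DEBUG lines above) and returns the
        # task info once vCenter reports success, or raises on failure.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)
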
[ 2100.250538] env[61962]: DEBUG oslo_concurrency.lockutils [None req-3a67e45f-5bb9-46ca-b715-37278cbfbd88 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Lock "2a520bab-558e-49e8-9816-c1acc00663f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 5.055s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.640682] env[61962]: DEBUG nova.compute.manager [req-fa2b0d46-10cf-408c-b4aa-2279a777a514 req-3a7edf2a-6af5-4984-a1ff-b1acf7b4eb95 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Received event network-changed-b391cad7-579a-4129-a648-832d83c15c16 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2101.640998] env[61962]: DEBUG nova.compute.manager [req-fa2b0d46-10cf-408c-b4aa-2279a777a514 req-3a7edf2a-6af5-4984-a1ff-b1acf7b4eb95 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Refreshing instance network info cache due to event network-changed-b391cad7-579a-4129-a648-832d83c15c16. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2101.641120] env[61962]: DEBUG oslo_concurrency.lockutils [req-fa2b0d46-10cf-408c-b4aa-2279a777a514 req-3a7edf2a-6af5-4984-a1ff-b1acf7b4eb95 service nova] Acquiring lock "refresh_cache-2a520bab-558e-49e8-9816-c1acc00663f6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2101.641331] env[61962]: DEBUG oslo_concurrency.lockutils [req-fa2b0d46-10cf-408c-b4aa-2279a777a514 req-3a7edf2a-6af5-4984-a1ff-b1acf7b4eb95 service nova] Acquired lock "refresh_cache-2a520bab-558e-49e8-9816-c1acc00663f6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2101.641416] env[61962]: DEBUG nova.network.neutron [req-fa2b0d46-10cf-408c-b4aa-2279a777a514 req-3a7edf2a-6af5-4984-a1ff-b1acf7b4eb95 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Refreshing network info cache for port b391cad7-579a-4129-a648-832d83c15c16 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2102.742113] env[61962]: DEBUG nova.network.neutron [req-fa2b0d46-10cf-408c-b4aa-2279a777a514 req-3a7edf2a-6af5-4984-a1ff-b1acf7b4eb95 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Updated VIF entry in instance network info cache for port b391cad7-579a-4129-a648-832d83c15c16. 
{{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2102.744145] env[61962]: DEBUG nova.network.neutron [req-fa2b0d46-10cf-408c-b4aa-2279a777a514 req-3a7edf2a-6af5-4984-a1ff-b1acf7b4eb95 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Updating instance_info_cache with network_info: [{"id": "b391cad7-579a-4129-a648-832d83c15c16", "address": "fa:16:3e:a5:6f:4c", "network": {"id": "23f760ff-3b92-41b1-bcdb-59357222b817", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-571426812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab68afcfc3e4c6f852bb69600c0f69a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb391cad7-57", "ovs_interfaceid": "b391cad7-579a-4129-a648-832d83c15c16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2102.760032] env[61962]: DEBUG oslo_concurrency.lockutils [req-fa2b0d46-10cf-408c-b4aa-2279a777a514 req-3a7edf2a-6af5-4984-a1ff-b1acf7b4eb95 service nova] Releasing lock "refresh_cache-2a520bab-558e-49e8-9816-c1acc00663f6" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.259109] env[61962]: WARNING oslo_vmware.rw_handles [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2117.259109] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2117.259109] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2117.259109] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2117.259109] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2117.259109] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 2117.259109] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2117.259109] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2117.259109] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2117.259109] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2117.259109] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2117.259109] env[61962]: ERROR oslo_vmware.rw_handles [ 2117.259959] 
env[61962]: DEBUG nova.virt.vmwareapi.images [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/780b6d01-daf2-4eee-8f9f-291782f14797/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2117.262185] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2117.262542] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Copying Virtual Disk [datastore2] vmware_temp/780b6d01-daf2-4eee-8f9f-291782f14797/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/780b6d01-daf2-4eee-8f9f-291782f14797/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2117.262931] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89b75bc5-81af-45e7-95d7-26a5842c6953 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.273053] env[61962]: DEBUG oslo_vmware.api [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for the task: (returnval){ [ 2117.273053] env[61962]: value = "task-4892117" [ 2117.273053] env[61962]: _type = "Task" [ 2117.273053] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.281930] env[61962]: DEBUG oslo_vmware.api [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Task: {'id': task-4892117, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.783937] env[61962]: DEBUG oslo_vmware.exceptions [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2117.785532] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.785532] env[61962]: ERROR nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2117.785532] env[61962]: Faults: ['InvalidArgument'] [ 2117.785532] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Traceback (most recent call last): [ 2117.785532] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2117.785532] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] yield resources [ 2117.785532] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2117.785532] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] self.driver.spawn(context, instance, image_meta, [ 2117.785532] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2117.785532] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] self._fetch_image_if_missing(context, vi) [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] image_cache(vi, tmp_image_ds_loc) [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] vm_util.copy_virtual_disk( [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] session._wait_for_task(vmdk_copy_task) [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] return self.wait_for_task(task_ref) [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] return evt.wait() [ 2117.785993] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2117.786534] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] result = hub.switch() [ 2117.786534] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2117.786534] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] return self.greenlet.switch() [ 2117.786534] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2117.786534] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] self.f(*self.args, **self.kw) [ 2117.786534] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2117.786534] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] raise exceptions.translate_fault(task_info.error) [ 2117.786534] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2117.786534] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Faults: ['InvalidArgument'] [ 2117.786534] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] [ 2117.786534] env[61962]: INFO nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Terminating instance [ 2117.787125] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.787345] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2117.787589] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86ed9702-f5c9-4f3f-9fb0-a21708176006 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.789806] env[61962]: DEBUG nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2117.790053] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2117.790768] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cd7a2c-6de5-4261-b02c-de547cddbea8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.797762] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2117.797974] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1aa865e4-c598-451c-bff3-ad2009b5195d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.800177] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2117.800356] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2117.801309] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e02b46fa-6b34-4393-9cb5-5cd449d197e5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.806340] env[61962]: DEBUG oslo_vmware.api [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 2117.806340] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5297f0c0-f94b-9850-d7d3-08ca0232a187" [ 2117.806340] env[61962]: _type = "Task" [ 2117.806340] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.814453] env[61962]: DEBUG oslo_vmware.api [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]5297f0c0-f94b-9850-d7d3-08ca0232a187, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.870223] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2117.870427] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2117.870607] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Deleting the datastore file [datastore2] 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2117.870884] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02c2c2d7-ec77-4716-9539-22cf45528855 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.877748] env[61962]: DEBUG oslo_vmware.api [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for the task: (returnval){ [ 2117.877748] env[61962]: value = "task-4892119" [ 2117.877748] env[61962]: _type = "Task" [ 2117.877748] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.886718] env[61962]: DEBUG oslo_vmware.api [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Task: {'id': task-4892119, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.316640] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2118.317065] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Creating directory with path [datastore2] vmware_temp/f8fcbb21-db23-4c3d-a756-75970e41c1bb/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2118.317187] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a659b9f-1392-46b5-86f8-ad9765171415 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.328503] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Created directory with path [datastore2] vmware_temp/f8fcbb21-db23-4c3d-a756-75970e41c1bb/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2118.328684] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Fetch image to [datastore2] vmware_temp/f8fcbb21-db23-4c3d-a756-75970e41c1bb/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2118.328877] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/f8fcbb21-db23-4c3d-a756-75970e41c1bb/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2118.329612] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37671222-1dc2-479a-8d4e-c1b3ae009fcc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.336281] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67f8045-bb16-4925-8fae-6eeb82c91db2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.345074] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc9a59b-9119-4eb8-9ac0-9b2682a7865c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.377243] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062bb89e-048c-45b5-9c2f-3c6941cf984f {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.389062] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-56897b73-067c-4fde-a2ff-827b2d544e10 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.390741] env[61962]: DEBUG oslo_vmware.api [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Task: {'id': task-4892119, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075984} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.390981] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2118.391235] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2118.391346] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2118.391519] env[61962]: INFO nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Took 0.60 seconds to destroy the instance on the hypervisor. 
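
[Editor's note] The failing step in the traceback above is the image-cache copy: CopyVirtualDisk_Task is started, _poll_task sees the task error out, and exceptions.translate_fault() turns the vCenter fault into the VimFaultException that aborts spawn(). A minimal sketch of that call path, assuming an established oslo.vmware session — the function name and argument plumbing here are illustrative, not Nova's vm_util:

    from oslo_vmware import exceptions as vexc

    def copy_virtual_disk(session, dc_ref, src_path, dst_path):
        # Start the copy on vCenter's VirtualDiskManager; the task moref
        # comes back immediately, as in the task-4892117 entry above.
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=src_path, sourceDatacenter=dc_ref,
            destName=dst_path, destDatacenter=dc_ref)
        try:
            session.wait_for_task(task)
        except vexc.VimFaultException:
            # fault_list on the exception carries the raw fault names
            # (here ['InvalidArgument']); names with no specific mapping
            # are logged as "Fault InvalidArgument not matched." before
            # the generic VimFaultException propagates to the caller.
            raise
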
[ 2118.393625] env[61962]: DEBUG nova.compute.claims [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2118.393790] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2118.393998] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.413173] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2118.469959] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f8fcbb21-db23-4c3d-a756-75970e41c1bb/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2118.529213] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2118.529349] env[61962]: DEBUG oslo_vmware.rw_handles [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f8fcbb21-db23-4c3d-a756-75970e41c1bb/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2118.574763] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a336dca3-483d-4917-96b1-99972abbfc9c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.582526] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e983321d-bd6c-4ca5-ac1a-0ca1f381474d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.611834] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd89b97-98ae-4a75-8446-2f792cebe924 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.619040] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42cb79b6-bd54-4031-811a-b8f19173ee1e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.633955] env[61962]: DEBUG nova.compute.provider_tree [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2118.642961] env[61962]: DEBUG nova.scheduler.client.report [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2118.657873] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.264s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.658417] env[61962]: ERROR nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2118.658417] env[61962]: Faults: ['InvalidArgument'] [ 2118.658417] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Traceback (most recent call last): [ 2118.658417] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2118.658417] 
env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] self.driver.spawn(context, instance, image_meta, [ 2118.658417] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2118.658417] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2118.658417] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2118.658417] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] self._fetch_image_if_missing(context, vi) [ 2118.658417] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2118.658417] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] image_cache(vi, tmp_image_ds_loc) [ 2118.658417] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] vm_util.copy_virtual_disk( [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] session._wait_for_task(vmdk_copy_task) [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] return self.wait_for_task(task_ref) [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] return evt.wait() [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] result = hub.switch() [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] return self.greenlet.switch() [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2118.658865] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] self.f(*self.args, **self.kw) [ 2118.659238] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2118.659238] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] raise exceptions.translate_fault(task_info.error) [ 2118.659238] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2118.659238] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Faults: ['InvalidArgument'] [ 2118.659238] env[61962]: ERROR nova.compute.manager [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] [ 2118.659238] env[61962]: DEBUG nova.compute.utils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2118.660992] env[61962]: DEBUG nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Build of instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 was re-scheduled: A specified parameter was not correct: fileType [ 2118.660992] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2118.661389] env[61962]: DEBUG nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2118.661628] env[61962]: DEBUG nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2118.661815] env[61962]: DEBUG nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2118.661981] env[61962]: DEBUG nova.network.neutron [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2118.967500] env[61962]: DEBUG nova.network.neutron [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2118.980975] env[61962]: INFO nova.compute.manager [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Took 0.32 seconds to deallocate network for instance. [ 2119.102643] env[61962]: INFO nova.scheduler.client.report [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Deleted allocations for instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 [ 2119.124217] env[61962]: DEBUG oslo_concurrency.lockutils [None req-55c96cbf-6671-47d9-82d1-1db28c753841 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 664.999s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.124582] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 468.821s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.124817] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Acquiring lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.125034] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.125206] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.127178] env[61962]: INFO nova.compute.manager [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Terminating instance [ 2119.129128] env[61962]: DEBUG nova.compute.manager [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2119.129324] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2119.129888] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57ea7f74-7068-48d6-90f2-3f60fffa1dfe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.139923] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba895ac-1033-4e39-9b26-661f5d99c45e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.169503] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2cf5409f-eaff-4b63-b8bb-eb15a7a81760 could not be found. [ 2119.169715] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2119.169897] env[61962]: INFO nova.compute.manager [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2119.170191] env[61962]: DEBUG oslo.service.loopingcall [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2119.170419] env[61962]: DEBUG nova.compute.manager [-] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2119.170517] env[61962]: DEBUG nova.network.neutron [-] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2119.202967] env[61962]: DEBUG nova.network.neutron [-] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.211641] env[61962]: INFO nova.compute.manager [-] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] Took 0.04 seconds to deallocate network for instance. [ 2119.306178] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2cf95c8f-0d61-4c93-8475-490eb8177967 tempest-ListServerFiltersTestJSON-924365092 tempest-ListServerFiltersTestJSON-924365092-project-member] Lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.307560] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 381.788s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.307892] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 2cf5409f-eaff-4b63-b8bb-eb15a7a81760] During sync_power_state the instance has a pending task (deleting). Skip. 
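[annotation] The "pending task (deleting). Skip." entry is the periodic power-state audit declining to act while another operation owns the instance. A simplified, self-contained sketch of that guard; the function name and instance shape are assumptions for illustration, not Nova's exact code:

    import logging

    logging.basicConfig(level=logging.INFO)
    LOG = logging.getLogger('sync_power_state_sketch')

    def should_sync_power_state(instance):
        # If any task (e.g. 'deleting') is in flight, the periodic audit
        # must not race the operation holding the instance lock; it logs
        # and skips, as in the entry above.
        task_state = instance.get('task_state')
        if task_state is not None:
            LOG.info('During sync_power_state the instance has a pending '
                     'task (%s). Skip.', task_state)
            return False
        return True

    should_sync_power_state({'task_state': 'deleting'})  # logs and skips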
[ 2119.308106] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "2cf5409f-eaff-4b63-b8bb-eb15a7a81760" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.942030] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Acquiring lock "2a520bab-558e-49e8-9816-c1acc00663f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.942419] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Lock "2a520bab-558e-49e8-9816-c1acc00663f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.942662] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Acquiring lock "2a520bab-558e-49e8-9816-c1acc00663f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.942662] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Lock "2a520bab-558e-49e8-9816-c1acc00663f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.942836] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Lock "2a520bab-558e-49e8-9816-c1acc00663f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.944668] env[61962]: INFO nova.compute.manager [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Terminating instance [ 2119.946839] env[61962]: DEBUG nova.compute.manager [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2119.947077] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Powering off the VM {{(pid=61962) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 2119.947541] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c982acdd-b80b-4b9a-8f88-ed3385c4fd1c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.956204] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Waiting for the task: (returnval){ [ 2119.956204] env[61962]: value = "task-4892120" [ 2119.956204] env[61962]: _type = "Task" [ 2119.956204] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.965265] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892120, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.466473] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892120, 'name': PowerOffVM_Task, 'duration_secs': 0.203584} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.466752] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Powered off the VM {{(pid=61962) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 2120.466927] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Volume detach. 
Driver type: vmdk {{(pid=61962) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2120.467129] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-953429', 'volume_id': '283acf53-fbac-4900-a61f-a572b75ac300', 'name': 'volume-283acf53-fbac-4900-a61f-a572b75ac300', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2a520bab-558e-49e8-9816-c1acc00663f6', 'attached_at': '', 'detached_at': '', 'volume_id': '283acf53-fbac-4900-a61f-a572b75ac300', 'serial': '283acf53-fbac-4900-a61f-a572b75ac300'} {{(pid=61962) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2120.467882] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5990810b-b148-4b55-8bd8-6e15a9b03205 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.486297] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95751d5f-3d10-44f9-ba88-aad0f95ed5e4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.493712] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a7871c-0600-4738-9240-58b6d0041601 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.511204] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c33ab42-69eb-44b0-a412-3b8bef223fa4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.526267] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] The volume has not been displaced from its original location: [datastore2] volume-283acf53-fbac-4900-a61f-a572b75ac300/volume-283acf53-fbac-4900-a61f-a572b75ac300.vmdk. No consolidation needed. 
{{(pid=61962) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2120.531857] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Reconfiguring VM instance instance-00000059 to detach disk 2000 {{(pid=61962) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2120.532153] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b135cf8-43f8-455f-a940-1cf720fbccd8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.550057] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Waiting for the task: (returnval){ [ 2120.550057] env[61962]: value = "task-4892121" [ 2120.550057] env[61962]: _type = "Task" [ 2120.550057] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.557904] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892121, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.060218] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892121, 'name': ReconfigVM_Task, 'duration_secs': 0.152026} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.060572] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Reconfigured VM instance instance-00000059 to detach disk 2000 {{(pid=61962) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2121.065067] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f9a109a-92ac-48c3-8171-ed321e2e58f7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.081534] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Waiting for the task: (returnval){ [ 2121.081534] env[61962]: value = "task-4892122" [ 2121.081534] env[61962]: _type = "Task" [ 2121.081534] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.091630] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892122, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.591587] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892122, 'name': ReconfigVM_Task, 'duration_secs': 0.140198} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.591992] env[61962]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-953429', 'volume_id': '283acf53-fbac-4900-a61f-a572b75ac300', 'name': 'volume-283acf53-fbac-4900-a61f-a572b75ac300', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2a520bab-558e-49e8-9816-c1acc00663f6', 'attached_at': '', 'detached_at': '', 'volume_id': '283acf53-fbac-4900-a61f-a572b75ac300', 'serial': '283acf53-fbac-4900-a61f-a572b75ac300'} {{(pid=61962) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2121.592346] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2121.593113] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8c08c0-bcd2-4261-8767-f2512bd127d3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.599821] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2121.600082] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdcc0e71-3b99-464e-b4ff-f790a9786670 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.657698] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2121.657952] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2121.658111] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 
tempest-ServersTestBootFromVolume-1925232432-project-member] Deleting the datastore file [datastore2] 2a520bab-558e-49e8-9816-c1acc00663f6 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2121.658395] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8e488bf-0286-4193-9a85-8989b5be704f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.664961] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Waiting for the task: (returnval){ [ 2121.664961] env[61962]: value = "task-4892124" [ 2121.664961] env[61962]: _type = "Task" [ 2121.664961] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.673960] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892124, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.174926] env[61962]: DEBUG oslo_vmware.api [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Task: {'id': task-4892124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069029} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.175308] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2122.175308] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2122.175458] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2122.175628] env[61962]: INFO nova.compute.manager [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Took 2.23 seconds to destroy the instance on the hypervisor. [ 2122.175862] env[61962]: DEBUG oslo.service.loopingcall [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2122.176058] env[61962]: DEBUG nova.compute.manager [-] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2122.176154] env[61962]: DEBUG nova.network.neutron [-] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2122.971034] env[61962]: DEBUG nova.network.neutron [-] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2122.990639] env[61962]: DEBUG nova.compute.manager [req-a8f5b597-1234-45dd-ba0b-fc83580eefc1 req-bd2d8c03-b2c9-443c-b08e-2925578858c1 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Received event network-vif-deleted-b391cad7-579a-4129-a648-832d83c15c16 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2122.990639] env[61962]: INFO nova.compute.manager [req-a8f5b597-1234-45dd-ba0b-fc83580eefc1 req-bd2d8c03-b2c9-443c-b08e-2925578858c1 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Neutron deleted interface b391cad7-579a-4129-a648-832d83c15c16; detaching it from the instance and deleting it from the info cache [ 2122.990639] env[61962]: DEBUG nova.network.neutron [req-a8f5b597-1234-45dd-ba0b-fc83580eefc1 req-bd2d8c03-b2c9-443c-b08e-2925578858c1 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2123.002022] env[61962]: INFO nova.compute.manager [-] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Took 0.82 seconds to deallocate network for instance. [ 2123.009201] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fe471b81-1a1e-43b7-b63f-2a3ba68fd7a4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.016860] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a144c95-70a4-4d5a-a54b-d3ee82d39b5a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.050751] env[61962]: DEBUG nova.compute.manager [req-a8f5b597-1234-45dd-ba0b-fc83580eefc1 req-bd2d8c03-b2c9-443c-b08e-2925578858c1 service nova] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Detach interface failed, port_id=b391cad7-579a-4129-a648-832d83c15c16, reason: Instance 2a520bab-558e-49e8-9816-c1acc00663f6 could not be found. {{(pid=61962) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11044}} [ 2123.096136] env[61962]: INFO nova.compute.manager [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Took 0.10 seconds to detach 1 volumes for instance. 
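[annotation] Each "Waiting for the task" / "progress is N%" pair above is oslo.vmware polling vCenter until PowerOffVM_Task, ReconfigVM_Task and DeleteDatastoreFile_Task leave their running state (the real entry point is VMwareAPISession.wait_for_task, driven by an oslo.service looping call, the same module behind the "Waiting for function ... to return" entries). A minimal stand-in for that loop, with the SOAP reads abstracted behind a callable; the names here are illustrative, not the library API:

    import time

    def wait_for_task(poll_once, interval=0.5):
        # poll_once() stands in for reading the vCenter TaskInfo and
        # returns (state, payload), where state is one of 'queued',
        # 'running', 'success' or 'error', mirroring the _poll_task
        # entries above.
        while True:
            state, payload = poll_once()
            if state == 'success':
                return payload          # e.g. {'duration_secs': 0.203584}
            if state == 'error':
                raise RuntimeError('vCenter task failed: %r' % (payload,))
            time.sleep(interval)        # still queued/running: poll again

    # Usage sketch: completes on the first poll.
    wait_for_task(lambda: ('success', {'duration_secs': 0.2}))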
[ 2123.099850] env[61962]: DEBUG nova.compute.manager [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Deleting volume: 283acf53-fbac-4900-a61f-a572b75ac300 {{(pid=61962) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3239}} [ 2123.222782] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.223199] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.223325] env[61962]: DEBUG nova.objects.instance [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Lazy-loading 'resources' on Instance uuid 2a520bab-558e-49e8-9816-c1acc00663f6 {{(pid=61962) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2123.316279] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6607325-fb91-4818-810e-96e0e77ea600 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.325449] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50ed882-bdaa-4756-b5ea-d8daa0b5f19d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.360744] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7141fc22-9006-4d81-8161-0048fa980e84 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.369836] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90af38cb-a973-4ee6-b4da-090c34bc0156 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.384813] env[61962]: DEBUG nova.compute.provider_tree [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2123.402972] env[61962]: DEBUG nova.scheduler.client.report [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2123.427884] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.204s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.452836] env[61962]: INFO nova.scheduler.client.report [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Deleted allocations for instance 2a520bab-558e-49e8-9816-c1acc00663f6 [ 2123.503383] env[61962]: DEBUG oslo_concurrency.lockutils [None req-2d89cc0b-d44c-4715-94bf-bf1238860dd6 tempest-ServersTestBootFromVolume-1925232432 tempest-ServersTestBootFromVolume-1925232432-project-member] Lock "2a520bab-558e-49e8-9816-c1acc00663f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.561s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2130.493044] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquiring lock "c2e03a3a-a3ac-4661-90f1-1e895a71b289" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2130.493426] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Lock "c2e03a3a-a3ac-4661-90f1-1e895a71b289" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2130.505701] env[61962]: DEBUG nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2130.561342] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2130.561622] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2130.563287] env[61962]: INFO nova.compute.claims [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2130.674929] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b52c07b-6ec5-4171-902f-34c6e4fd2653 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.683304] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61a853c-6ddd-4a69-9a53-d3c44faf804a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.714525] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e591e131-500f-467c-9a0a-0d04388a777c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.722752] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbe28b5-3a97-425b-aeb5-e2185e35ad7f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.737131] env[61962]: DEBUG nova.compute.provider_tree [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2130.750479] env[61962]: DEBUG nova.scheduler.client.report [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2130.769844] env[61962]: DEBUG oslo_concurrency.lockutils [None 
req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.208s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2130.770437] env[61962]: DEBUG nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2130.805407] env[61962]: DEBUG nova.compute.utils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2130.807066] env[61962]: DEBUG nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2130.807209] env[61962]: DEBUG nova.network.neutron [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2130.838803] env[61962]: DEBUG nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2130.876571] env[61962]: DEBUG nova.policy [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8b17c7d1eea48db9fef8d05689059e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd09904dce58b4cee83cebb57e6ad26a4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 2130.905734] env[61962]: DEBUG nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2130.934091] env[61962]: DEBUG nova.virt.hardware [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2130.934307] env[61962]: DEBUG nova.virt.hardware [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2130.934479] env[61962]: DEBUG nova.virt.hardware [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2130.934675] env[61962]: DEBUG nova.virt.hardware [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2130.934825] env[61962]: DEBUG nova.virt.hardware [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2130.934974] env[61962]: DEBUG nova.virt.hardware [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2130.935209] env[61962]: DEBUG nova.virt.hardware [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2130.935368] env[61962]: DEBUG nova.virt.hardware [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2130.935539] env[61962]: DEBUG nova.virt.hardware [None 
req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2130.935704] env[61962]: DEBUG nova.virt.hardware [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2130.935915] env[61962]: DEBUG nova.virt.hardware [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2130.936787] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef706ef1-55b3-44fa-a583-6e7993b16d33 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.946928] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967deefc-b6f5-4a49-8f01-ef932a87d75c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.244468] env[61962]: DEBUG nova.network.neutron [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Successfully created port: 5dc7a3cd-152e-496a-afb3-6cb21639ad8a {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2131.883883] env[61962]: DEBUG nova.compute.manager [req-04c73733-3294-4eaf-905f-ed56e9d891be req-79285bc4-43b9-43fa-8340-a4a3e984f18e service nova] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Received event network-vif-plugged-5dc7a3cd-152e-496a-afb3-6cb21639ad8a {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2131.884174] env[61962]: DEBUG oslo_concurrency.lockutils [req-04c73733-3294-4eaf-905f-ed56e9d891be req-79285bc4-43b9-43fa-8340-a4a3e984f18e service nova] Acquiring lock "c2e03a3a-a3ac-4661-90f1-1e895a71b289-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.884333] env[61962]: DEBUG oslo_concurrency.lockutils [req-04c73733-3294-4eaf-905f-ed56e9d891be req-79285bc4-43b9-43fa-8340-a4a3e984f18e service nova] Lock "c2e03a3a-a3ac-4661-90f1-1e895a71b289-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.884505] env[61962]: DEBUG oslo_concurrency.lockutils [req-04c73733-3294-4eaf-905f-ed56e9d891be req-79285bc4-43b9-43fa-8340-a4a3e984f18e service nova] Lock "c2e03a3a-a3ac-4661-90f1-1e895a71b289-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.884676] env[61962]: DEBUG nova.compute.manager 
[req-04c73733-3294-4eaf-905f-ed56e9d891be req-79285bc4-43b9-43fa-8340-a4a3e984f18e service nova] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] No waiting events found dispatching network-vif-plugged-5dc7a3cd-152e-496a-afb3-6cb21639ad8a {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2131.884841] env[61962]: WARNING nova.compute.manager [req-04c73733-3294-4eaf-905f-ed56e9d891be req-79285bc4-43b9-43fa-8340-a4a3e984f18e service nova] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Received unexpected event network-vif-plugged-5dc7a3cd-152e-496a-afb3-6cb21639ad8a for instance with vm_state building and task_state spawning. [ 2132.211837] env[61962]: DEBUG nova.network.neutron [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Successfully updated port: 5dc7a3cd-152e-496a-afb3-6cb21639ad8a {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2132.231060] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquiring lock "refresh_cache-c2e03a3a-a3ac-4661-90f1-1e895a71b289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.231233] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquired lock "refresh_cache-c2e03a3a-a3ac-4661-90f1-1e895a71b289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.231398] env[61962]: DEBUG nova.network.neutron [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2132.275938] env[61962]: DEBUG nova.network.neutron [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2132.530860] env[61962]: DEBUG nova.network.neutron [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Updating instance_info_cache with network_info: [{"id": "5dc7a3cd-152e-496a-afb3-6cb21639ad8a", "address": "fa:16:3e:e7:a5:ed", "network": {"id": "f502c11b-57b9-4f6c-a30a-2f36f8e77ee0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1357472377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09904dce58b4cee83cebb57e6ad26a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc7a3cd-15", "ovs_interfaceid": "5dc7a3cd-152e-496a-afb3-6cb21639ad8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2132.546749] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Releasing lock "refresh_cache-c2e03a3a-a3ac-4661-90f1-1e895a71b289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2132.547063] env[61962]: DEBUG nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Instance network_info: |[{"id": "5dc7a3cd-152e-496a-afb3-6cb21639ad8a", "address": "fa:16:3e:e7:a5:ed", "network": {"id": "f502c11b-57b9-4f6c-a30a-2f36f8e77ee0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1357472377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09904dce58b4cee83cebb57e6ad26a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc7a3cd-15", "ovs_interfaceid": "5dc7a3cd-152e-496a-afb3-6cb21639ad8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2132.547469] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:a5:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a88b707-352e-4be7-b1d6-ad6074b40ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5dc7a3cd-152e-496a-afb3-6cb21639ad8a', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2132.555048] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Creating folder: Project (d09904dce58b4cee83cebb57e6ad26a4). Parent ref: group-v953327. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2132.556024] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f000d09-d012-4da2-8ccb-fcf6dda304d7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.568007] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Created folder: Project (d09904dce58b4cee83cebb57e6ad26a4) in parent group-v953327. [ 2132.568286] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Creating folder: Instances. Parent ref: group-v953432. {{(pid=61962) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2132.568534] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a55d2b6-7502-4d0d-aee7-5e8de1b69473 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.578892] env[61962]: INFO nova.virt.vmwareapi.vm_util [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Created folder: Instances in parent group-v953432. [ 2132.579280] env[61962]: DEBUG oslo.service.loopingcall [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2132.579854] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2132.579854] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-adda72d0-89e5-44ae-a38e-a129229e8533 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.599814] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2132.599814] env[61962]: value = "task-4892128" [ 2132.599814] env[61962]: _type = "Task" [ 2132.599814] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.607843] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892128, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.110325] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892128, 'name': CreateVM_Task, 'duration_secs': 0.276249} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.110709] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2133.111252] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2133.111423] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2133.111738] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2133.111983] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-447806bb-4f76-4345-9754-8263e509034b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.116492] env[61962]: DEBUG oslo_vmware.api [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Waiting for the task: (returnval){ [ 2133.116492] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52a0fd7f-d4c6-0941-bd4c-46258ccbeac4" [ 2133.116492] env[61962]: _type = "Task" [ 2133.116492] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.124554] env[61962]: DEBUG oslo_vmware.api [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52a0fd7f-d4c6-0941-bd4c-46258ccbeac4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.627989] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2133.627989] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2133.627989] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2134.139077] env[61962]: DEBUG nova.compute.manager [req-51255f2b-e77f-40c3-a3f2-aa90691e81da req-19cd91dd-99e1-4d1e-b5b9-8e70ec8da809 service nova] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Received event network-changed-5dc7a3cd-152e-496a-afb3-6cb21639ad8a {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2134.139336] env[61962]: DEBUG nova.compute.manager [req-51255f2b-e77f-40c3-a3f2-aa90691e81da req-19cd91dd-99e1-4d1e-b5b9-8e70ec8da809 service nova] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Refreshing instance network info cache due to event network-changed-5dc7a3cd-152e-496a-afb3-6cb21639ad8a. 
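Annotation: nearly every lockutils record in this log reports how long the caller waited for a lock and how long it was held. A small context manager reproducing that bookkeeping — a sketch of the idea, not oslo.concurrency's actual decorator:

```python
import threading
import time
from contextlib import contextmanager

@contextmanager
def timed_lock(lock, name):
    t0 = time.monotonic()
    with lock:
        print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            print(f'Lock "{name}" "released" :: held {time.monotonic() - t1:.3f}s')

# usage sketch:
with timed_lock(threading.Lock(), 'compute_resources'):
    pass
```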
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2134.139494] env[61962]: DEBUG oslo_concurrency.lockutils [req-51255f2b-e77f-40c3-a3f2-aa90691e81da req-19cd91dd-99e1-4d1e-b5b9-8e70ec8da809 service nova] Acquiring lock "refresh_cache-c2e03a3a-a3ac-4661-90f1-1e895a71b289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2134.139636] env[61962]: DEBUG oslo_concurrency.lockutils [req-51255f2b-e77f-40c3-a3f2-aa90691e81da req-19cd91dd-99e1-4d1e-b5b9-8e70ec8da809 service nova] Acquired lock "refresh_cache-c2e03a3a-a3ac-4661-90f1-1e895a71b289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2134.139793] env[61962]: DEBUG nova.network.neutron [req-51255f2b-e77f-40c3-a3f2-aa90691e81da req-19cd91dd-99e1-4d1e-b5b9-8e70ec8da809 service nova] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Refreshing network info cache for port 5dc7a3cd-152e-496a-afb3-6cb21639ad8a {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2134.158270] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2134.169428] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.169631] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.169795] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2134.169947] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2134.171029] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a562cc-1bcd-42a4-87c1-9fffc2055137 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.181901] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8ccfd9-6fa6-4a05-be9a-f37e12f5b61f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.195995] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb3a71e-6bbf-49c4-a0a2-14414e595c31 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.202940] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f86e79-6dc4-45cc-ad63-7c68e55390fc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.234274] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180467MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2134.234356] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.234559] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.294035] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 02c198ea-61de-4e70-866c-39781d353d04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2134.294213] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85003695-cb3e-4bce-9a3a-a1d799b36369 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2134.294345] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1a74c820-67e9-424c-b4e6-333db3449251 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2134.294469] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c2e03a3a-a3ac-4661-90f1-1e895a71b289 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
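Annotation: the audit above reports the raw hypervisor view; the inventory records a few lines below add reserved amounts and allocation ratios, and Placement's schedulable capacity per resource class works out as (total - reserved) * allocation_ratio. A sketch with the values from this log (the helper name is illustrative only):

```python
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}

def schedulable_capacity(inv):
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(schedulable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}
```

With 192 schedulable VCPUs, the 4 allocated ones reported in the final resource view barely register.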
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2134.294650] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2134.294848] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=100GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] stats={'failed_builds': '1', 'num_instances': '4', 'num_vm_building': '4', 'num_task_deleting': '3', 'num_os_type_None': '4', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '2', 'io_workload': '4', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'num_task_spawning': '1', 'num_proj_d09904dce58b4cee83cebb57e6ad26a4': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2134.367083] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53cc4be-ed3e-4d3f-80c6-39aee5a125e0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.375617] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3807f5da-3526-4b99-aafe-71bcdcd2a826 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.408938] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca91eac-4946-46b7-9859-ccff4f360da6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.421030] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a9afbf-be64-45c8-ad75-4da057513257 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.436385] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2134.446696] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2134.461364] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2134.461565] env[61962]: DEBUG oslo_concurrency.lockutils [None 
req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.227s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2134.462502] env[61962]: DEBUG nova.network.neutron [req-51255f2b-e77f-40c3-a3f2-aa90691e81da req-19cd91dd-99e1-4d1e-b5b9-8e70ec8da809 service nova] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Updated VIF entry in instance network info cache for port 5dc7a3cd-152e-496a-afb3-6cb21639ad8a. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2134.462833] env[61962]: DEBUG nova.network.neutron [req-51255f2b-e77f-40c3-a3f2-aa90691e81da req-19cd91dd-99e1-4d1e-b5b9-8e70ec8da809 service nova] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Updating instance_info_cache with network_info: [{"id": "5dc7a3cd-152e-496a-afb3-6cb21639ad8a", "address": "fa:16:3e:e7:a5:ed", "network": {"id": "f502c11b-57b9-4f6c-a30a-2f36f8e77ee0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1357472377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09904dce58b4cee83cebb57e6ad26a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc7a3cd-15", "ovs_interfaceid": "5dc7a3cd-152e-496a-afb3-6cb21639ad8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.472953] env[61962]: DEBUG oslo_concurrency.lockutils [req-51255f2b-e77f-40c3-a3f2-aa90691e81da req-19cd91dd-99e1-4d1e-b5b9-8e70ec8da809 service nova] Releasing lock "refresh_cache-c2e03a3a-a3ac-4661-90f1-1e895a71b289" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2138.464291] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2138.464804] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
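Annotation: the instance_info_cache entry updated above is stored as a JSON list of VIFs. A short sketch of pulling the MAC and fixed IP out of it; the structure is trimmed to the keys touched, with values copied from the record:

```python
network_info = [{
    "id": "5dc7a3cd-152e-496a-afb3-6cb21639ad8a",
    "address": "fa:16:3e:e7:a5:ed",
    "network": {"subnets": [{
        "cidr": "192.168.128.0/28",
        "ips": [{"address": "192.168.128.11", "type": "fixed"}],
    }]},
}]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            if ip["type"] == "fixed":
                print(vif["address"], ip["address"])
# -> fa:16:3e:e7:a5:ed 192.168.128.11
```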
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2142.155029] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.159580] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2145.159485] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2148.158846] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2148.159302] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2148.159302] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2148.175464] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2148.175626] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2148.175759] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2148.175890] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2148.176022] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
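Annotation: the heal pass above skips every instance still in the Building state, which is why it ends with nothing to update. A simplified sketch of that filter; the vm_state field follows Nova's instance model, the loop itself is illustrative:

```python
def instances_to_heal(instances):
    for inst in instances:
        if inst["vm_state"] == "building":
            continue  # "Skipping network cache update ... because it is Building."
        yield inst

instances = [
    {"uuid": "02c198ea-61de-4e70-866c-39781d353d04", "vm_state": "building"},
    {"uuid": "c2e03a3a-a3ac-4661-90f1-1e895a71b289", "vm_state": "building"},
]
print(list(instances_to_heal(instances)))  # [] -> "Didn't find any instances ..."
```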
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2150.158662] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2150.159094] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2150.159094] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2166.943552] env[61962]: WARNING oslo_vmware.rw_handles [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2166.943552] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2166.943552] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2166.943552] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2166.943552] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2166.943552] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 2166.943552] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2166.943552] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2166.943552] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2166.943552] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2166.943552] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2166.943552] env[61962]: ERROR oslo_vmware.rw_handles [ 2166.944292] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/f8fcbb21-db23-4c3d-a756-75970e41c1bb/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2166.945843] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2166.946116] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Copying Virtual Disk 
[datastore2] vmware_temp/f8fcbb21-db23-4c3d-a756-75970e41c1bb/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/f8fcbb21-db23-4c3d-a756-75970e41c1bb/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2166.946422] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00524b1b-020d-4c02-a2f4-a148a1915999 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.954083] env[61962]: DEBUG oslo_vmware.api [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 2166.954083] env[61962]: value = "task-4892129" [ 2166.954083] env[61962]: _type = "Task" [ 2166.954083] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.963086] env[61962]: DEBUG oslo_vmware.api [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': task-4892129, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.464861] env[61962]: DEBUG oslo_vmware.exceptions [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2167.465177] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2167.465754] env[61962]: ERROR nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2167.465754] env[61962]: Faults: ['InvalidArgument'] [ 2167.465754] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] Traceback (most recent call last): [ 2167.465754] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2167.465754] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] yield resources [ 2167.465754] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2167.465754] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] self.driver.spawn(context, instance, image_meta, [ 2167.465754] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 
2167.465754] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2167.465754] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2167.465754] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] self._fetch_image_if_missing(context, vi) [ 2167.465754] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2167.465754] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] image_cache(vi, tmp_image_ds_loc) [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] vm_util.copy_virtual_disk( [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] session._wait_for_task(vmdk_copy_task) [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] return self.wait_for_task(task_ref) [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] return evt.wait() [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] result = hub.switch() [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] return self.greenlet.switch() [ 2167.466242] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2167.466869] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] self.f(*self.args, **self.kw) [ 2167.466869] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2167.466869] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] raise exceptions.translate_fault(task_info.error) [ 2167.466869] env[61962]: ERROR nova.compute.manager [instance: 
02c198ea-61de-4e70-866c-39781d353d04] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2167.466869] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] Faults: ['InvalidArgument'] [ 2167.466869] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] [ 2167.466869] env[61962]: INFO nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Terminating instance [ 2167.467723] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2167.467930] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2167.468183] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e664eb2b-1b9f-41ca-87de-31b1470f2c9c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.470449] env[61962]: DEBUG nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Start destroying the instance on the hypervisor. 
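Annotation: the requests in this section serialize on a lock named after the cached image's datastore path, so only one download of f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe can run at a time while other spawns of the same image block. A minimal sketch using oslo.concurrency's real lock() context manager; the body is a stand-in:

```python
from oslo_concurrency import lockutils

IMAGE_KEY = ("[datastore2] devstack-image-cache_base/"
             "f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/"
             "f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk")

def fetch_image_if_missing():
    # stand-in: only the first holder downloads; concurrent spawns of the
    # same image wait on the lock instead of fetching a second copy
    print("fetching image once")

with lockutils.lock(IMAGE_KEY):
    fetch_image_if_missing()
```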
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2167.470664] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2167.471410] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa42a63-5a9a-4371-a044-b59ed98b07ce {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.478817] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2167.479094] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c01a553c-1c6d-4773-aa2f-a063d022466b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.481593] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2167.481758] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2167.482750] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbee81f3-3733-4a0b-852c-6a9b6e832e61 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.488050] env[61962]: DEBUG oslo_vmware.api [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 2167.488050] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]5286e900-6f54-d666-d3e7-6b2c295366fb" [ 2167.488050] env[61962]: _type = "Task" [ 2167.488050] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.554015] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2167.554253] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2167.554411] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Deleting the datastore file [datastore2] 02c198ea-61de-4e70-866c-39781d353d04 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2167.554682] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa9246c1-99a0-4217-927f-23bc6d827f44 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.561137] env[61962]: DEBUG oslo_vmware.api [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 2167.561137] env[61962]: value = "task-4892131" [ 2167.561137] env[61962]: _type = "Task" [ 2167.561137] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.569729] env[61962]: DEBUG oslo_vmware.api [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': task-4892131, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.999744] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2168.000199] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating directory with path [datastore2] vmware_temp/f69c39b8-904d-4488-bafe-e749db7e711e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2168.000321] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c42d9c14-fcb8-4395-a39b-31f91e9c5190 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.012523] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Created directory with path [datastore2] vmware_temp/f69c39b8-904d-4488-bafe-e749db7e711e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2168.012774] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Fetch image to [datastore2] vmware_temp/f69c39b8-904d-4488-bafe-e749db7e711e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2168.012882] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/f69c39b8-904d-4488-bafe-e749db7e711e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2168.013816] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce04ef2f-fdcf-4280-a444-8358e969906f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.021216] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e953e28-d804-4018-a9e5-07af64e3a97c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.033433] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42dc1e5a-ac85-4fb6-8e6b-289baff83557 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.067835] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac22b362-78ad-4ac7-9ba7-9dfdff6ff569 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.077126] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f38a7f22-3c40-451f-a856-5bc64bee6f6f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.078934] env[61962]: DEBUG oslo_vmware.api [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': task-4892131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081578} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.079226] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2168.079413] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2168.079623] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2168.079816] env[61962]: INFO nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Took 0.61 seconds to destroy the instance on the hypervisor. 
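Annotation: the destroy path above is a two-step sequence — UnregisterVM removes the vCenter inventory entry, then a FileManager.DeleteDatastoreFile_Task removes the instance directory and is polled to completion. A sketch of that flow; session.invoke and wait_for_task are illustrative stand-ins, not Nova's actual helpers:

```python
def destroy_instance(session, vm_ref, instance_dir):
    session.invoke("UnregisterVM", vm_ref)             # "Unregistered the VM"
    task = session.invoke("DeleteDatastoreFile_Task",  # "Deleting the datastore file
                          name=instance_dir)           #  [datastore2] 02c198ea-..."
    session.wait_for_task(task)                        # "Deleted the datastore file"
```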
[ 2168.082341] env[61962]: DEBUG nova.compute.claims [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2168.082506] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.082718] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.101344] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2168.162137] env[61962]: DEBUG oslo_vmware.rw_handles [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f69c39b8-904d-4488-bafe-e749db7e711e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2168.222018] env[61962]: DEBUG oslo_vmware.rw_handles [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2168.222180] env[61962]: DEBUG oslo_vmware.rw_handles [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f69c39b8-904d-4488-bafe-e749db7e711e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
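Annotation: the image data is pushed to the datastore through an HTTP write handle against the ESX /folder endpoint; the earlier RemoteDisconnected warning in this section came from the getresponse() call made when such a handle is closed. A stdlib-only mirror of the idea — the real implementation is oslo_vmware.rw_handles:

```python
import http.client

def upload_to_datastore(host, path, chunks, size):
    conn = http.client.HTTPSConnection(host, 443)
    conn.putrequest("PUT", path)
    conn.putheader("Content-Length", str(size))
    conn.endheaders()
    for chunk in chunks:
        conn.send(chunk)
    resp = conn.getresponse()  # the WARNING fired here: the remote end closed
    resp.read()                # the connection before sending a response
    conn.close()
```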
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2168.256451] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65416eb-80d3-4505-8f6b-4e7a2ae307ea {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.264147] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ddd324-fe4a-4d84-83ea-57afc3823ae5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.295147] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eabf56a-065b-4c68-9106-b2d4e5aa0edb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.303605] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0808e1c8-739d-426f-aa4e-7aa992a3fabf {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.317324] env[61962]: DEBUG nova.compute.provider_tree [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2168.328736] env[61962]: DEBUG nova.scheduler.client.report [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2168.342673] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.260s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.343202] env[61962]: ERROR nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.343202] env[61962]: Faults: ['InvalidArgument'] [ 2168.343202] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] Traceback (most recent call last): [ 2168.343202] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2168.343202] env[61962]: ERROR nova.compute.manager [instance: 
02c198ea-61de-4e70-866c-39781d353d04] self.driver.spawn(context, instance, image_meta, [ 2168.343202] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2168.343202] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2168.343202] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2168.343202] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] self._fetch_image_if_missing(context, vi) [ 2168.343202] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2168.343202] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] image_cache(vi, tmp_image_ds_loc) [ 2168.343202] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] vm_util.copy_virtual_disk( [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] session._wait_for_task(vmdk_copy_task) [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] return self.wait_for_task(task_ref) [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] return evt.wait() [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] result = hub.switch() [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] return self.greenlet.switch() [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2168.343621] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] self.f(*self.args, **self.kw) [ 2168.344030] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2168.344030] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] raise exceptions.translate_fault(task_info.error) [ 2168.344030] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.344030] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] Faults: ['InvalidArgument'] [ 2168.344030] env[61962]: ERROR nova.compute.manager [instance: 02c198ea-61de-4e70-866c-39781d353d04] [ 2168.344030] env[61962]: DEBUG nova.compute.utils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2168.345323] env[61962]: DEBUG nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Build of instance 02c198ea-61de-4e70-866c-39781d353d04 was re-scheduled: A specified parameter was not correct: fileType [ 2168.345323] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2168.345709] env[61962]: DEBUG nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2168.345884] env[61962]: DEBUG nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2168.346073] env[61962]: DEBUG nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2168.346244] env[61962]: DEBUG nova.network.neutron [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2168.652986] env[61962]: DEBUG nova.network.neutron [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2168.667290] env[61962]: INFO nova.compute.manager [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Took 0.32 seconds to deallocate network for instance. [ 2168.761629] env[61962]: INFO nova.scheduler.client.report [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Deleted allocations for instance 02c198ea-61de-4e70-866c-39781d353d04 [ 2168.784037] env[61962]: DEBUG oslo_concurrency.lockutils [None req-e364545b-7bcc-40d5-a3ad-56d3b52a7b42 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "02c198ea-61de-4e70-866c-39781d353d04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 662.303s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.784244] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "02c198ea-61de-4e70-866c-39781d353d04" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 467.182s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.784381] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "02c198ea-61de-4e70-866c-39781d353d04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.784587] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "02c198ea-61de-4e70-866c-39781d353d04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2168.784756] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "02c198ea-61de-4e70-866c-39781d353d04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.786910] env[61962]: INFO nova.compute.manager [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Terminating instance [ 2168.789034] env[61962]: DEBUG nova.compute.manager [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2168.789034] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2168.789640] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0cb6752-1c7b-42fb-ae15-82c2bc83b9c0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.800309] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20e15f7-af99-4b64-8a0e-b927a27e3f67 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.829542] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 02c198ea-61de-4e70-866c-39781d353d04 could not be found. [ 2168.829776] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2168.829961] env[61962]: INFO nova.compute.manager [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2168.830225] env[61962]: DEBUG oslo.service.loopingcall [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2168.830444] env[61962]: DEBUG nova.compute.manager [-] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2168.830544] env[61962]: DEBUG nova.network.neutron [-] [instance: 02c198ea-61de-4e70-866c-39781d353d04] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2168.856563] env[61962]: DEBUG nova.network.neutron [-] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2168.866909] env[61962]: INFO nova.compute.manager [-] [instance: 02c198ea-61de-4e70-866c-39781d353d04] Took 0.04 seconds to deallocate network for instance. [ 2168.975125] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6f89abdd-04ea-4e19-b728-c96ff4a1545c tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "02c198ea-61de-4e70-866c-39781d353d04" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.191s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.976016] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "02c198ea-61de-4e70-866c-39781d353d04" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 431.456s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.976555] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 02c198ea-61de-4e70-866c-39781d353d04] During sync_power_state the instance has a pending task (deleting). Skip.
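The lockutils records above show why the terminate request sat queued for 467 seconds: build, terminate and power-state sync for one instance all serialize on a lock named after the instance UUID, so "waited" is the time spent blocked behind the previous holder and "held" is the critical section itself. A minimal sketch of that pattern, using the real oslo.concurrency decorator but hypothetical worker functions and timings:

    import threading
    import time

    from oslo_concurrency import lockutils

    UUID = '02c198ea-61de-4e70-866c-39781d353d04'

    @lockutils.synchronized(UUID)
    def do_build():
        time.sleep(2)    # stands in for the long _locked_do_build_and_run_instance

    @lockutils.synchronized(UUID)
    def do_terminate():
        pass             # can only start once do_build releases the lock

    t = threading.Thread(target=do_build)
    t.start()
    time.sleep(0.1)
    do_terminate()       # blocks ~1.9s here: logged as "waited N s" before "acquired"
    t.join()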
[ 2168.976756] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "02c198ea-61de-4e70-866c-39781d353d04" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.158617] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2195.170545] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.170774] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.170975] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.171120] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2195.172250] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b3213a-2607-43a5-bb66-c3fcb712c0bd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.181362] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915e5a08-7fe9-4b4d-ab08-684eb3d0c71a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.195971] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c537eca7-e97f-426e-a2c5-9d0b55ea03d2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.202432] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faba1a6f-2769-4c29-817e-0144c6f833a8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.232401] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180555MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2195.232571] env[61962]: DEBUG
oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.232740] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.284295] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 85003695-cb3e-4bce-9a3a-a1d799b36369 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2195.284459] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1a74c820-67e9-424c-b4e6-333db3449251 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2195.284587] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c2e03a3a-a3ac-4661-90f1-1e895a71b289 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2195.284768] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2195.284919] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=100GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] stats={'failed_builds': '2', 'num_instances': '3', 'num_vm_building': '3', 'num_task_deleting': '2', 'num_os_type_None': '3', 'num_proj_e58b68272c404983993a09ad12e8c03b': '1', 'io_workload': '3', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '1', 'num_task_spawning': '1', 'num_proj_d09904dce58b4cee83cebb57e6ad26a4': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2195.334077] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b428ca7-e4c4-41ae-a0a7-0406bb5350ff {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.343231] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ab20aa-b89d-4c00-94eb-9a42ef4515dd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.372816] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3b22dea8-21cf-4d87-9473-7ff2ddd4ec6e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.380307] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84b0a66-0b7a-4288-af14-91d3e783e7e0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.393716] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2195.401974] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2195.415401] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2195.415568] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.183s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2198.416711] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2198.417117] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2204.154884] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.160724] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2207.158226] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2208.159177] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2208.159529] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2208.159529] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2208.173139] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2208.173324] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2208.173467] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2208.173547] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2210.169441] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2211.159066] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2211.159311] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2212.159525] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2217.186035] env[61962]: WARNING oslo_vmware.rw_handles [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2217.186035] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2217.186035] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2217.186035] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2217.186035] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2217.186035] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 2217.186035] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2217.186035] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2217.186035] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2217.186035] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2217.186035] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2217.186035] env[61962]: ERROR oslo_vmware.rw_handles [ 2217.186896] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/f69c39b8-904d-4488-bafe-e749db7e711e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2217.188291] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Caching image 
{{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2217.188568] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Copying Virtual Disk [datastore2] vmware_temp/f69c39b8-904d-4488-bafe-e749db7e711e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/f69c39b8-904d-4488-bafe-e749db7e711e/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2217.188885] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9bd110e8-8f43-4607-9ab5-3a2acfcfeafe {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.198381] env[61962]: DEBUG oslo_vmware.api [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 2217.198381] env[61962]: value = "task-4892132" [ 2217.198381] env[61962]: _type = "Task" [ 2217.198381] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.206897] env[61962]: DEBUG oslo_vmware.api [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4892132, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.709181] env[61962]: DEBUG oslo_vmware.exceptions [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2217.709457] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2217.710027] env[61962]: ERROR nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2217.710027] env[61962]: Faults: ['InvalidArgument'] [ 2217.710027] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Traceback (most recent call last): [ 2217.710027] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2217.710027] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] yield resources [ 2217.710027] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2217.710027] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] self.driver.spawn(context, instance, image_meta, [ 2217.710027] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2217.710027] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2217.710027] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2217.710027] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] self._fetch_image_if_missing(context, vi) [ 2217.710027] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2217.710027] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] image_cache(vi, tmp_image_ds_loc) [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] vm_util.copy_virtual_disk( [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] session._wait_for_task(vmdk_copy_task) [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] return self.wait_for_task(task_ref) [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] return evt.wait() [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] result = hub.switch() [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] return self.greenlet.switch() [ 2217.710566] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2217.711023] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] self.f(*self.args, **self.kw) [ 2217.711023] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2217.711023] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] raise exceptions.translate_fault(task_info.error) [ 2217.711023] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2217.711023] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Faults: ['InvalidArgument'] [ 2217.711023] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] [ 2217.711023] env[61962]: INFO nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Terminating instance [ 2217.711954] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2217.713169] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2217.713870] env[61962]: DEBUG nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Start destroying the instance on the hypervisor. 
{{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2217.714116] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2217.714374] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ede4e91f-e89f-459a-a765-045881d30524 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.716814] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd93f448-f7a3-4c32-94c9-1495b13ee3d5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.724245] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2217.724479] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c8245b7-98b7-4be2-b846-a118143c5602 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.726762] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2217.726937] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2217.727896] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-903e22d6-bb75-4c2d-b13c-12c44a828c25 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.732805] env[61962]: DEBUG oslo_vmware.api [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 2217.732805] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]523d921d-be4c-32a0-ce06-3fd89ed93906" [ 2217.732805] env[61962]: _type = "Task" [ 2217.732805] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.741529] env[61962]: DEBUG oslo_vmware.api [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]523d921d-be4c-32a0-ce06-3fd89ed93906, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.800969] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2217.801239] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2217.801405] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleting the datastore file [datastore2] 85003695-cb3e-4bce-9a3a-a1d799b36369 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2217.801682] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-637701b7-c150-4b55-8538-2a43d7ec5e1b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.808261] env[61962]: DEBUG oslo_vmware.api [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for the task: (returnval){ [ 2217.808261] env[61962]: value = "task-4892134" [ 2217.808261] env[61962]: _type = "Task" [ 2217.808261] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.816528] env[61962]: DEBUG oslo_vmware.api [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4892134, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.243746] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2218.244138] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Creating directory with path [datastore2] vmware_temp/7c19fa3e-55f1-4a02-b989-fd81f811a060/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2218.244239] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17181666-8742-4a2f-a971-74017c6a9971 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.255767] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Created directory with path [datastore2] vmware_temp/7c19fa3e-55f1-4a02-b989-fd81f811a060/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2218.255973] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Fetch image to [datastore2] vmware_temp/7c19fa3e-55f1-4a02-b989-fd81f811a060/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2218.256140] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/7c19fa3e-55f1-4a02-b989-fd81f811a060/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2218.256868] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9a230b-c577-435e-884a-5da4e9a2b1d6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.263706] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0819ad-ca2b-4616-b461-e34e917a328d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.272778] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fdbccc-19d1-4f89-8973-b0c3a8732136 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.302459] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61a2941-337b-4833-a464-5f3b2dc948ae {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.308275] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-43a24824-519c-44d4-9f30-1611e53fc079 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.318538] env[61962]: DEBUG oslo_vmware.api [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Task: {'id': task-4892134, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07471} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.318759] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2218.318934] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2218.319116] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2218.319289] env[61962]: INFO nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Took 0.61 seconds to destroy the instance on the hypervisor. 
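Each of the vCenter task records above follows the same oslo.vmware pattern: a *_Task method is invoked, the caller blocks in wait_for_task() while a looping call polls progress, and a task that ends in error is translated into a VimFaultException carrying the fault names. A hedged sketch of that flow (host, credentials, datacenter ref and datastore paths are placeholders; the call layout mirrors, rather than reproduces, Nova's vm_util.copy_virtual_disk):

    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    def copy_disk(host, user, password, dc_ref, src, dst):
        # Placeholder credentials; VMwareAPISession logs in on construction.
        session = api.VMwareAPISession(host, user, password,
                                       api_retry_count=3,
                                       task_poll_interval=0.5)
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=src, sourceDatacenter=dc_ref,
                                  destName=dst, destDatacenter=dc_ref)
        try:
            session.wait_for_task(task)   # polls until success, raises on task error
        except vexc.VimFaultException as e:
            # fault_list is what the log prints as Faults: ['InvalidArgument']
            print(e.fault_list)
            raise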
[ 2218.321589] env[61962]: DEBUG nova.compute.claims [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2218.321720] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2218.321926] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.337576] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2218.390661] env[61962]: DEBUG oslo_vmware.rw_handles [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7c19fa3e-55f1-4a02-b989-fd81f811a060/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2218.454836] env[61962]: DEBUG oslo_vmware.rw_handles [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2218.454836] env[61962]: DEBUG oslo_vmware.rw_handles [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7c19fa3e-55f1-4a02-b989-fd81f811a060/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2218.463228] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58038efd-22e4-4869-9612-bc9fc906fc31 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.471586] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f233cf63-30b7-49c0-99ba-d96aad4c8f8b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.501899] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452c8f59-2975-402f-afec-d4d9e62fc3d5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.510257] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19603cb-9ea3-4ced-bef6-c44e3e6e4278 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.524202] env[61962]: DEBUG nova.compute.provider_tree [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2218.534755] env[61962]: DEBUG nova.scheduler.client.report [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2218.546612] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.225s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2218.547156] env[61962]: ERROR nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2218.547156] env[61962]: Faults: ['InvalidArgument'] [ 2218.547156] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Traceback (most recent call last): [ 2218.547156] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2218.547156] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] 
self.driver.spawn(context, instance, image_meta, [ 2218.547156] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2218.547156] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2218.547156] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2218.547156] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] self._fetch_image_if_missing(context, vi) [ 2218.547156] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2218.547156] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] image_cache(vi, tmp_image_ds_loc) [ 2218.547156] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] vm_util.copy_virtual_disk( [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] session._wait_for_task(vmdk_copy_task) [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] return self.wait_for_task(task_ref) [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] return evt.wait() [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] result = hub.switch() [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] return self.greenlet.switch() [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2218.547537] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] self.f(*self.args, **self.kw) [ 2218.547897] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 2218.547897] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] raise exceptions.translate_fault(task_info.error) [ 2218.547897] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2218.547897] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Faults: ['InvalidArgument'] [ 2218.547897] env[61962]: ERROR nova.compute.manager [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] [ 2218.547897] env[61962]: DEBUG nova.compute.utils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2218.549307] env[61962]: DEBUG nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Build of instance 85003695-cb3e-4bce-9a3a-a1d799b36369 was re-scheduled: A specified parameter was not correct: fileType [ 2218.549307] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2218.549666] env[61962]: DEBUG nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2218.549839] env[61962]: DEBUG nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2218.550021] env[61962]: DEBUG nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2218.550232] env[61962]: DEBUG nova.network.neutron [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2218.935509] env[61962]: DEBUG nova.network.neutron [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.946825] env[61962]: INFO nova.compute.manager [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Took 0.40 seconds to deallocate network for instance. 
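The repeated "Inventory has not changed" records carry the provider's full inventory. The usable capacity placement derives from such a record is, in essence, (total - reserved) * allocation_ratio per resource class, with max_unit capping what a single allocation may consume. Worked against the figures logged above (a back-of-envelope check, not placement's actual code path):

    # Inventory exactly as reported for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 96},
    }

    def capacity(inv):
        # Scale what is left after the host reservation; max_unit bounds a
        # single allocation, not this total.
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    for rc, inv in inventory.items():
        print(rc, capacity(inv))   # VCPU 192, MEMORY_MB 196078, DISK_GB 200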
[ 2219.037727] env[61962]: INFO nova.scheduler.client.report [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Deleted allocations for instance 85003695-cb3e-4bce-9a3a-a1d799b36369 [ 2219.074239] env[61962]: DEBUG oslo_concurrency.lockutils [None req-36219049-67ba-4c74-811b-90c296d8053e tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "85003695-cb3e-4bce-9a3a-a1d799b36369" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 540.760s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2219.074522] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "85003695-cb3e-4bce-9a3a-a1d799b36369" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 481.555s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.074825] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] During sync_power_state the instance has a pending task (spawning). Skip. [ 2219.074961] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "85003695-cb3e-4bce-9a3a-a1d799b36369" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2219.075520] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "85003695-cb3e-4bce-9a3a-a1d799b36369" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 344.938s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.075764] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Acquiring lock "85003695-cb3e-4bce-9a3a-a1d799b36369-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2219.075979] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "85003695-cb3e-4bce-9a3a-a1d799b36369-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.076370] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "85003695-cb3e-4bce-9a3a-a1d799b36369-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2219.078555] env[61962]: INFO nova.compute.manager [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Terminating instance [ 2219.080469] env[61962]: DEBUG nova.compute.manager [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2219.080678] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2219.081193] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bffbd3c8-f187-4c74-8f3a-d09d86d3b90b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.092380] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2c3c44-9f4c-4cc7-8860-5a1e85c9e4cd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.121718] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 85003695-cb3e-4bce-9a3a-a1d799b36369 could not be found. [ 2219.121870] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2219.122609] env[61962]: INFO nova.compute.manager [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2219.122609] env[61962]: DEBUG oslo.service.loopingcall [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2219.122609] env[61962]: DEBUG nova.compute.manager [-] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2219.122802] env[61962]: DEBUG nova.network.neutron [-] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2219.149464] env[61962]: DEBUG nova.network.neutron [-] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2219.158369] env[61962]: INFO nova.compute.manager [-] [instance: 85003695-cb3e-4bce-9a3a-a1d799b36369] Took 0.04 seconds to deallocate network for instance. [ 2219.253783] env[61962]: DEBUG oslo_concurrency.lockutils [None req-d38b05bf-5e20-46d2-9e7d-0ef646235a5c tempest-ImagesTestJSON-240028984 tempest-ImagesTestJSON-240028984-project-member] Lock "85003695-cb3e-4bce-9a3a-a1d799b36369" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.178s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.160052] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2256.171224] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2256.171434] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2256.171601] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.171806] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2256.172971] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23243449-04ee-409e-8b03-f84223a02d45 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.181887] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e97971-517d-433b-9d73-0436f0a2cc5a {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.195774] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f03efe-d140-4652-8568-b9ec96daeeef {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.201925] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90f7ee6-303d-4513-aea1-c3e7a342c354 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.231786] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180551MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2256.231951] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2256.232135] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2256.280804] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 1a74c820-67e9-424c-b4e6-333db3449251 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2256.280960] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c2e03a3a-a3ac-4661-90f1-1e895a71b289 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2256.281158] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2256.281312] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=100GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] stats={'failed_builds': '3', 'num_instances': '2', 'num_vm_building': '2', 'num_task_deleting': '1', 'num_os_type_None': '2', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '1', 'io_workload': '2', 'num_task_spawning': '1', 'num_proj_d09904dce58b4cee83cebb57e6ad26a4': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2256.324037] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572f2fd4-191c-4cb0-87db-78b45146a302 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.331200] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968b6dfa-d656-4f0d-8c23-ad06116abe75 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.361353] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aadefe98-b533-449a-9a00-db6ddaad9995 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.369162] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8734b48-3e79-43c4-b1b7-baabe0fc3d02 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.382380] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2256.391627] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2256.405564] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2256.405753] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" 
by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.174s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2260.405974] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.406361] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2265.154315] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2267.065527] env[61962]: WARNING oslo_vmware.rw_handles [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2267.065527] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2267.065527] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2267.065527] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2267.065527] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2267.065527] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 2267.065527] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2267.065527] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2267.065527] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2267.065527] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2267.065527] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2267.065527] env[61962]: ERROR oslo_vmware.rw_handles [ 2267.066237] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/7c19fa3e-55f1-4a02-b989-fd81f811a060/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2267.068040] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2267.068156] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 
tempest-ServersTestJSON-536512511-project-member] Copying Virtual Disk [datastore2] vmware_temp/7c19fa3e-55f1-4a02-b989-fd81f811a060/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/7c19fa3e-55f1-4a02-b989-fd81f811a060/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2267.068469] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6e2006c-7ff6-42fe-a82e-c8fa48f49136 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.076816] env[61962]: DEBUG oslo_vmware.api [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 2267.076816] env[61962]: value = "task-4892135" [ 2267.076816] env[61962]: _type = "Task" [ 2267.076816] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.086169] env[61962]: DEBUG oslo_vmware.api [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': task-4892135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.158305] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2267.588063] env[61962]: DEBUG oslo_vmware.exceptions [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2267.588286] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2267.588856] env[61962]: ERROR nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2267.588856] env[61962]: Faults: ['InvalidArgument'] [ 2267.588856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Traceback (most recent call last): [ 2267.588856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2267.588856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] yield resources [ 2267.588856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2267.588856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] self.driver.spawn(context, instance, image_meta, [ 2267.588856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2267.588856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2267.588856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2267.588856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] self._fetch_image_if_missing(context, vi) [ 2267.588856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2267.588856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] image_cache(vi, tmp_image_ds_loc) [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] vm_util.copy_virtual_disk( [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] session._wait_for_task(vmdk_copy_task) [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] return self.wait_for_task(task_ref) [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] return evt.wait() [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] result = hub.switch() [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] return self.greenlet.switch() [ 2267.589556] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2267.590290] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] self.f(*self.args, **self.kw) [ 2267.590290] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2267.590290] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] raise exceptions.translate_fault(task_info.error) [ 2267.590290] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2267.590290] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Faults: ['InvalidArgument'] [ 2267.590290] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] [ 2267.590290] env[61962]: INFO nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Terminating instance [ 2267.590983] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2267.591288] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2267.591599] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbff1b4e-fdf9-48e8-9b39-bdf1a868d56a {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.594315] env[61962]: DEBUG nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2267.594505] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2267.595313] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd05dd32-e818-48b8-997b-3784abfd841e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.603115] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2267.603353] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1cbbcd60-3a50-466a-927f-92f70e48eeca {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.605592] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2267.605775] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2267.607013] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37cf99de-966c-4b45-b002-2acb9ca6b35d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.612576] env[61962]: DEBUG oslo_vmware.api [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Waiting for the task: (returnval){ [ 2267.612576] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52bdaf19-4dfa-25f3-06df-720f8dc1142e" [ 2267.612576] env[61962]: _type = "Task" [ 2267.612576] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.621231] env[61962]: DEBUG oslo_vmware.api [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52bdaf19-4dfa-25f3-06df-720f8dc1142e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.664646] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2267.664892] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2267.665040] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Deleting the datastore file [datastore2] 1a74c820-67e9-424c-b4e6-333db3449251 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2267.665327] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0aa55351-fd2d-4856-aaf3-612500110dad {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.672508] env[61962]: DEBUG oslo_vmware.api [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 2267.672508] env[61962]: value = "task-4892137" [ 2267.672508] env[61962]: _type = "Task" [ 2267.672508] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.681100] env[61962]: DEBUG oslo_vmware.api [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': task-4892137, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.123549] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2268.123933] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Creating directory with path [datastore2] vmware_temp/4ec24a30-26ce-445f-bc9e-4bbeae161378/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2268.123980] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41fe3351-0bc9-4972-bd0d-f3909c7b4341 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.136019] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Created directory with path [datastore2] vmware_temp/4ec24a30-26ce-445f-bc9e-4bbeae161378/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2268.136259] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Fetch image to [datastore2] vmware_temp/4ec24a30-26ce-445f-bc9e-4bbeae161378/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2268.136375] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/4ec24a30-26ce-445f-bc9e-4bbeae161378/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2268.137144] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24627c2f-2547-4a15-9f0b-3445c5ddf210 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.144339] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafc266f-511c-4198-bc28-cec9dfdf7633 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.153539] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47eec133-c81d-4f13-b15c-a9d11f8c8d6d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.157670] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task 
ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2268.157826] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2268.157946] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2268.190584] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a822ca6e-6359-4c44-b6dd-faba0e08c97a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.194438] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2268.194438] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2268.194438] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2268.194835] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2268.200342] env[61962]: DEBUG oslo_vmware.api [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': task-4892137, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07885} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2268.201853] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2268.202008] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2268.202196] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2268.202366] env[61962]: INFO nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Took 0.61 seconds to destroy the instance on the hypervisor. [ 2268.204620] env[61962]: DEBUG nova.compute.claims [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2268.205267] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2268.205542] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.208216] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a8ad56a6-689b-4e0f-acc9-f8167189f7a7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.234058] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2268.288648] env[61962]: DEBUG oslo_vmware.rw_handles [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4ec24a30-26ce-445f-bc9e-4bbeae161378/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2268.291545] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc5595a4-c86e-4138-988b-177e7c083e23 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.352240] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028e9e5d-55b8-4db4-8241-8d6643c410c4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.357546] env[61962]: DEBUG oslo_vmware.rw_handles [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2268.357752] env[61962]: DEBUG oslo_vmware.rw_handles [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4ec24a30-26ce-445f-bc9e-4bbeae161378/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2268.385344] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7a5f7e-fc25-437a-8d0f-53d15bf998b9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.393426] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f234f373-a353-4139-b8b1-f590c8ad318d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.406759] env[61962]: DEBUG nova.compute.provider_tree [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2268.416877] env[61962]: DEBUG nova.scheduler.client.report [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2268.430285] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 
tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.225s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.430856] env[61962]: ERROR nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2268.430856] env[61962]: Faults: ['InvalidArgument'] [ 2268.430856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Traceback (most recent call last): [ 2268.430856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2268.430856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] self.driver.spawn(context, instance, image_meta, [ 2268.430856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2268.430856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2268.430856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2268.430856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] self._fetch_image_if_missing(context, vi) [ 2268.430856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2268.430856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] image_cache(vi, tmp_image_ds_loc) [ 2268.430856] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] vm_util.copy_virtual_disk( [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] session._wait_for_task(vmdk_copy_task) [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] return self.wait_for_task(task_ref) [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] return evt.wait() [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 
1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] result = hub.switch() [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] return self.greenlet.switch() [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2268.431467] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] self.f(*self.args, **self.kw) [ 2268.432064] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2268.432064] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] raise exceptions.translate_fault(task_info.error) [ 2268.432064] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2268.432064] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Faults: ['InvalidArgument'] [ 2268.432064] env[61962]: ERROR nova.compute.manager [instance: 1a74c820-67e9-424c-b4e6-333db3449251] [ 2268.432064] env[61962]: DEBUG nova.compute.utils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2268.433039] env[61962]: DEBUG nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Build of instance 1a74c820-67e9-424c-b4e6-333db3449251 was re-scheduled: A specified parameter was not correct: fileType [ 2268.433039] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2268.433452] env[61962]: DEBUG nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2268.433629] env[61962]: DEBUG nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2268.433804] env[61962]: DEBUG nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2268.433981] env[61962]: DEBUG nova.network.neutron [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2268.718953] env[61962]: DEBUG nova.network.neutron [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2268.732273] env[61962]: INFO nova.compute.manager [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Took 0.30 seconds to deallocate network for instance. [ 2268.829442] env[61962]: INFO nova.scheduler.client.report [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Deleted allocations for instance 1a74c820-67e9-424c-b4e6-333db3449251 [ 2268.853103] env[61962]: DEBUG oslo_concurrency.lockutils [None req-baa26604-ba56-4dd3-8c26-a36534ca2fce tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "1a74c820-67e9-424c-b4e6-333db3449251" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 369.622s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.853700] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "1a74c820-67e9-424c-b4e6-333db3449251" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 174.152s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.853700] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "1a74c820-67e9-424c-b4e6-333db3449251-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2268.853898] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "1a74c820-67e9-424c-b4e6-333db3449251-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.854059] env[61962]: 
DEBUG oslo_concurrency.lockutils [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "1a74c820-67e9-424c-b4e6-333db3449251-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.856302] env[61962]: INFO nova.compute.manager [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Terminating instance [ 2268.858641] env[61962]: DEBUG nova.compute.manager [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2268.858641] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2268.859510] env[61962]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-674eeda2-2ce1-47c4-83ce-31d2e4473a4c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.868966] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0578fbd-84a1-4c14-bbea-b78ae8f73151 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.895274] env[61962]: WARNING nova.virt.vmwareapi.vmops [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1a74c820-67e9-424c-b4e6-333db3449251 could not be found. [ 2268.895501] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2268.895680] env[61962]: INFO nova.compute.manager [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2268.895935] env[61962]: DEBUG oslo.service.loopingcall [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2268.896573] env[61962]: DEBUG nova.compute.manager [-] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2268.896690] env[61962]: DEBUG nova.network.neutron [-] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2268.974076] env[61962]: DEBUG nova.network.neutron [-] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2268.982256] env[61962]: INFO nova.compute.manager [-] [instance: 1a74c820-67e9-424c-b4e6-333db3449251] Took 0.09 seconds to deallocate network for instance. [ 2269.084909] env[61962]: DEBUG oslo_concurrency.lockutils [None req-6463bc87-9e12-470b-8f3e-ca5bce4cdd8d tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "1a74c820-67e9-424c-b4e6-333db3449251" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.231s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2270.360838] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "cf00153e-76fc-48c4-ac9f-5fa9b616d7de" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.361215] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "cf00153e-76fc-48c4-ac9f-5fa9b616d7de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2270.373929] env[61962]: DEBUG nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2270.428708] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.428964] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2270.430566] env[61962]: INFO nova.compute.claims [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2270.517704] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9213de9-8397-4775-99b0-403d0608cd64 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.525773] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498cb29d-7cf6-4034-aa06-c29c2edd89c6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.556380] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3116b105-9a05-41c9-b790-e4211c686bf2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.563921] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d255fac4-b330-4bbf-99f6-59e1de4ea61b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.577172] env[61962]: DEBUG nova.compute.provider_tree [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2270.587399] env[61962]: DEBUG nova.scheduler.client.report [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2270.600990] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 
tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.172s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2270.601480] env[61962]: DEBUG nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2270.634713] env[61962]: DEBUG nova.compute.utils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2270.635983] env[61962]: DEBUG nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2270.636776] env[61962]: DEBUG nova.network.neutron [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2270.645633] env[61962]: DEBUG nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2270.700288] env[61962]: DEBUG nova.policy [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0be80fa75c4c49d899a3dac926c1ed37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc34536d51ef4a22aef0b274ba5aa28e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 2270.713306] env[61962]: DEBUG nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2270.738586] env[61962]: DEBUG nova.virt.hardware [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2270.738829] env[61962]: DEBUG nova.virt.hardware [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2270.739024] env[61962]: DEBUG nova.virt.hardware [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2270.739218] env[61962]: DEBUG nova.virt.hardware [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2270.739367] env[61962]: DEBUG nova.virt.hardware [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2270.739518] env[61962]: DEBUG nova.virt.hardware [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2270.739780] env[61962]: DEBUG nova.virt.hardware [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2270.739874] env[61962]: DEBUG nova.virt.hardware [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2270.740057] env[61962]: DEBUG nova.virt.hardware [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 
tempest-ServersTestJSON-536512511-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2270.740225] env[61962]: DEBUG nova.virt.hardware [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2270.740398] env[61962]: DEBUG nova.virt.hardware [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2270.741320] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf4bd6e-eef0-4116-bed5-50c248d681a9 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.750398] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ebd0eb-0286-4764-8395-26f7bb534a1e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.052975] env[61962]: DEBUG nova.network.neutron [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Successfully created port: 6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2271.707763] env[61962]: DEBUG nova.compute.manager [req-faf249b9-bf6e-47d3-aebf-a7eadf837985 req-7bf738e6-a22b-4086-95c6-eaca794e81ad service nova] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Received event network-vif-plugged-6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2271.708044] env[61962]: DEBUG oslo_concurrency.lockutils [req-faf249b9-bf6e-47d3-aebf-a7eadf837985 req-7bf738e6-a22b-4086-95c6-eaca794e81ad service nova] Acquiring lock "cf00153e-76fc-48c4-ac9f-5fa9b616d7de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2271.708202] env[61962]: DEBUG oslo_concurrency.lockutils [req-faf249b9-bf6e-47d3-aebf-a7eadf837985 req-7bf738e6-a22b-4086-95c6-eaca794e81ad service nova] Lock "cf00153e-76fc-48c4-ac9f-5fa9b616d7de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2271.708369] env[61962]: DEBUG oslo_concurrency.lockutils [req-faf249b9-bf6e-47d3-aebf-a7eadf837985 req-7bf738e6-a22b-4086-95c6-eaca794e81ad service nova] Lock "cf00153e-76fc-48c4-ac9f-5fa9b616d7de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2271.708536] env[61962]: DEBUG nova.compute.manager [req-faf249b9-bf6e-47d3-aebf-a7eadf837985 req-7bf738e6-a22b-4086-95c6-eaca794e81ad service nova] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] No 
waiting events found dispatching network-vif-plugged-6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2271.708717] env[61962]: WARNING nova.compute.manager [req-faf249b9-bf6e-47d3-aebf-a7eadf837985 req-7bf738e6-a22b-4086-95c6-eaca794e81ad service nova] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Received unexpected event network-vif-plugged-6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df for instance with vm_state building and task_state spawning. [ 2271.793122] env[61962]: DEBUG nova.network.neutron [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Successfully updated port: 6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2271.806551] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "refresh_cache-cf00153e-76fc-48c4-ac9f-5fa9b616d7de" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2271.806779] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired lock "refresh_cache-cf00153e-76fc-48c4-ac9f-5fa9b616d7de" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2271.806940] env[61962]: DEBUG nova.network.neutron [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2271.849830] env[61962]: DEBUG nova.network.neutron [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2272.029517] env[61962]: DEBUG nova.network.neutron [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Updating instance_info_cache with network_info: [{"id": "6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df", "address": "fa:16:3e:61:8c:32", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ce034fc-7b", "ovs_interfaceid": "6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2272.042123] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Releasing lock "refresh_cache-cf00153e-76fc-48c4-ac9f-5fa9b616d7de" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2272.042582] env[61962]: DEBUG nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Instance network_info: |[{"id": "6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df", "address": "fa:16:3e:61:8c:32", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ce034fc-7b", "ovs_interfaceid": "6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2272.043504] env[61962]: 
DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:8c:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4d3f69a-b086-4c3b-b976-5a848b63dfc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2272.051807] env[61962]: DEBUG oslo.service.loopingcall [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2272.052356] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2272.052592] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05614be7-7c32-44c7-ad95-46fa3a6ccb3e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.074487] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2272.074487] env[61962]: value = "task-4892138" [ 2272.074487] env[61962]: _type = "Task" [ 2272.074487] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2272.083491] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892138, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2272.159618] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.585349] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892138, 'name': CreateVM_Task} progress is 99%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.085203] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892138, 'name': CreateVM_Task} progress is 99%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.158375] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2273.158605] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2273.585935] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892138, 'name': CreateVM_Task, 'duration_secs': 1.312284} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2273.586235] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2273.586691] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2273.586901] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2273.587221] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2273.587478] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a26868ab-c36c-4c58-a2fc-c1ebe13e9c88 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.592868] env[61962]: DEBUG oslo_vmware.api [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 2273.592868] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52e1493b-4bee-c16b-c112-08ab550d2041" [ 2273.592868] env[61962]: _type = "Task" [ 2273.592868] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2273.601796] env[61962]: DEBUG oslo_vmware.api [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52e1493b-4bee-c16b-c112-08ab550d2041, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.742091] env[61962]: DEBUG nova.compute.manager [req-1ff1eaf9-8695-44c7-a6fd-44a68d875910 req-b59c713d-deeb-4b08-837a-8fdca7f3aacd service nova] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Received event network-changed-6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2273.742213] env[61962]: DEBUG nova.compute.manager [req-1ff1eaf9-8695-44c7-a6fd-44a68d875910 req-b59c713d-deeb-4b08-837a-8fdca7f3aacd service nova] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Refreshing instance network info cache due to event network-changed-6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2273.742411] env[61962]: DEBUG oslo_concurrency.lockutils [req-1ff1eaf9-8695-44c7-a6fd-44a68d875910 req-b59c713d-deeb-4b08-837a-8fdca7f3aacd service nova] Acquiring lock "refresh_cache-cf00153e-76fc-48c4-ac9f-5fa9b616d7de" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2273.742553] env[61962]: DEBUG oslo_concurrency.lockutils [req-1ff1eaf9-8695-44c7-a6fd-44a68d875910 req-b59c713d-deeb-4b08-837a-8fdca7f3aacd service nova] Acquired lock "refresh_cache-cf00153e-76fc-48c4-ac9f-5fa9b616d7de" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2273.742711] env[61962]: DEBUG nova.network.neutron [req-1ff1eaf9-8695-44c7-a6fd-44a68d875910 req-b59c713d-deeb-4b08-837a-8fdca7f3aacd service nova] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Refreshing network info cache for port 6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2273.997182] env[61962]: DEBUG nova.network.neutron [req-1ff1eaf9-8695-44c7-a6fd-44a68d875910 req-b59c713d-deeb-4b08-837a-8fdca7f3aacd service nova] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Updated VIF entry in instance network info cache for port 6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2273.997538] env[61962]: DEBUG nova.network.neutron [req-1ff1eaf9-8695-44c7-a6fd-44a68d875910 req-b59c713d-deeb-4b08-837a-8fdca7f3aacd service nova] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Updating instance_info_cache with network_info: [{"id": "6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df", "address": "fa:16:3e:61:8c:32", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ce034fc-7b", "ovs_interfaceid": "6ce034fc-7bfe-4a3d-a11f-a2bd38bd15df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2274.007372] env[61962]: DEBUG oslo_concurrency.lockutils [req-1ff1eaf9-8695-44c7-a6fd-44a68d875910 req-b59c713d-deeb-4b08-837a-8fdca7f3aacd service nova] Releasing lock "refresh_cache-cf00153e-76fc-48c4-ac9f-5fa9b616d7de" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2274.103985] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Releasing lock 
"[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2274.104387] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2274.104469] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2317.158027] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2317.176852] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.177089] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.177260] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2317.177416] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2317.178596] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd56de3-54ec-4184-ab2b-52d19237042c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.187471] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6f3ef7-4062-40a8-bcf5-e63fb0ff18e6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.201358] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6de0d1-953e-470f-ac93-3e7b0e7b36e7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.208086] env[61962]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cb3e07-a32b-4814-a4b7-400d767221a4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.237140] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180557MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2317.237306] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.237484] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.295080] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c2e03a3a-a3ac-4661-90f1-1e895a71b289 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2317.295247] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance cf00153e-76fc-48c4-ac9f-5fa9b616d7de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2317.295424] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2317.295572] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=100GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] stats={'failed_builds': '4', 'num_instances': '2', 'num_vm_building': '2', 'num_task_spawning': '2', 'num_os_type_None': '2', 'num_proj_d09904dce58b4cee83cebb57e6ad26a4': '1', 'io_workload': '2', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2317.331039] env[61962]: WARNING oslo_vmware.rw_handles [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2317.331039] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2317.331039] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2317.331039] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2317.331039] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2317.331039] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 2317.331039] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2317.331039] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2317.331039] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2317.331039] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2317.331039] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2317.331039] env[61962]: ERROR oslo_vmware.rw_handles [ 2317.331528] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/4ec24a30-26ce-445f-bc9e-4bbeae161378/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2317.333342] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2317.333577] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-421224d2-c9e0-4f53-b6ec-296507e9118a 
tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Copying Virtual Disk [datastore2] vmware_temp/4ec24a30-26ce-445f-bc9e-4bbeae161378/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/4ec24a30-26ce-445f-bc9e-4bbeae161378/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2317.333863] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7774416-782d-4774-a74d-77215e834e9f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.337380] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787d90ba-4a2f-4c0a-bbf6-b6988e4534fc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.347640] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd81ef4b-3977-4787-98d7-d58f8fc0ef60 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.351019] env[61962]: DEBUG oslo_vmware.api [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Waiting for the task: (returnval){ [ 2317.351019] env[61962]: value = "task-4892139" [ 2317.351019] env[61962]: _type = "Task" [ 2317.351019] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.378816] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcacf13-ccec-4bbd-9468-84ef0e1aac78 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.384515] env[61962]: DEBUG oslo_vmware.api [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Task: {'id': task-4892139, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.389789] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b07407-75c8-42b8-a6d1-a3cd4d64f702 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.403201] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2317.411656] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2317.425311] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2317.425497] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.188s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2317.861206] env[61962]: DEBUG oslo_vmware.exceptions [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Fault InvalidArgument not matched. 
{{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2317.861527] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2317.862103] env[61962]: ERROR nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2317.862103] env[61962]: Faults: ['InvalidArgument'] [ 2317.862103] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Traceback (most recent call last): [ 2317.862103] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2317.862103] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] yield resources [ 2317.862103] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2317.862103] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] self.driver.spawn(context, instance, image_meta, [ 2317.862103] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2317.862103] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2317.862103] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2317.862103] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] self._fetch_image_if_missing(context, vi) [ 2317.862103] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] image_cache(vi, tmp_image_ds_loc) [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] vm_util.copy_virtual_disk( [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] session._wait_for_task(vmdk_copy_task) [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] return self.wait_for_task(task_ref) [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] return evt.wait() [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] result = hub.switch() [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2317.862740] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] return self.greenlet.switch() [ 2317.863221] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2317.863221] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] self.f(*self.args, **self.kw) [ 2317.863221] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2317.863221] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] raise exceptions.translate_fault(task_info.error) [ 2317.863221] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2317.863221] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Faults: ['InvalidArgument'] [ 2317.863221] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] [ 2317.863221] env[61962]: INFO nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Terminating instance [ 2317.864071] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2317.864283] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2317.864531] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7501fade-3582-462a-a649-6f092ce262e1 {{(pid=61962) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.866799] env[61962]: DEBUG nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2317.867034] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2317.867772] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b037d0b3-5c3d-47aa-8c3b-83cec8ffec95 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.875144] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2317.875375] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b610ea8b-d4af-4730-86b8-403af0830b21 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.878146] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2317.878323] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2317.878996] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab10f86e-7570-4eda-bae4-d3476ec0f451 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.884136] env[61962]: DEBUG oslo_vmware.api [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 2317.884136] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52067dea-8f4e-f924-e251-071b5dcfb3eb" [ 2317.884136] env[61962]: _type = "Task" [ 2317.884136] env[61962]: } to complete. 
{{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.901350] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2317.901762] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Creating directory with path [datastore2] vmware_temp/42e79439-d7f6-46be-b246-fee389d109d8/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2317.902109] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a0bc3c5-285c-4186-bf06-c4f4fa1aefb6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.948590] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Created directory with path [datastore2] vmware_temp/42e79439-d7f6-46be-b246-fee389d109d8/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2317.948820] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Fetch image to [datastore2] vmware_temp/42e79439-d7f6-46be-b246-fee389d109d8/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2317.948998] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/42e79439-d7f6-46be-b246-fee389d109d8/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2317.949853] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fda7ca-f743-484d-afa1-e118f8b347a7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.957540] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b0d074-cbd0-41be-9446-9ddad04a65ba {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.968064] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5a9da5-e1fd-4959-95ef-ee0d4504ed72 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.974937] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 
c2e03a3a-a3ac-4661-90f1-1e895a71b289] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2317.975340] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2317.975655] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Deleting the datastore file [datastore2] c2e03a3a-a3ac-4661-90f1-1e895a71b289 {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2317.975986] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e64ab08-1d63-4f2f-93ee-8cb1b2ca10fc {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.004280] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4473ff47-3c1e-4315-b31e-40bd53cdeceb {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.008336] env[61962]: DEBUG oslo_vmware.api [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Waiting for the task: (returnval){ [ 2318.008336] env[61962]: value = "task-4892141" [ 2318.008336] env[61962]: _type = "Task" [ 2318.008336] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2318.014100] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-01262696-4383-4e1e-8853-ba5d0e09c84a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.018633] env[61962]: DEBUG oslo_vmware.api [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Task: {'id': task-4892141, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2318.045999] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2318.111399] env[61962]: DEBUG oslo_vmware.rw_handles [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/42e79439-d7f6-46be-b246-fee389d109d8/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2318.171166] env[61962]: DEBUG oslo_vmware.rw_handles [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2318.171166] env[61962]: DEBUG oslo_vmware.rw_handles [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/42e79439-d7f6-46be-b246-fee389d109d8/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2318.519727] env[61962]: DEBUG oslo_vmware.api [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Task: {'id': task-4892141, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212011} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2318.520037] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2318.520232] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2318.520411] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2318.520592] env[61962]: INFO nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Took 0.65 seconds to destroy the instance on the hypervisor. 
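The DeleteDatastoreFile_Task records above show oslo.vmware's generic poll-until-done pattern: a vSphere task is submitted (task-4892141), then wait_for_task/_poll_task repeatedly read its TaskInfo until the state becomes terminal, logging "progress is 0%" along the way. Below is a minimal sketch of that pattern, for illustration only and not the library's actual implementation; `get_task_info` is an assumed stand-in for the PropertyCollector read that oslo.vmware really performs, and the field names mirror the vSphere TaskInfo object.

```python
import time


class TaskFailed(Exception):
    """Raised when a vSphere-style task ends in the 'error' state."""


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a task until it reaches a terminal state.

    get_task_info: callable returning an object with .state
    ('queued' | 'running' | 'success' | 'error'), .progress, and
    .error, mirroring the vSphere TaskInfo data object.
    """
    while True:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            # oslo.vmware translates the fault at this point; compare the
            # "raise exceptions.translate_fault(task_info.error)" frame in
            # the traceback records that follow.
            raise TaskFailed(info.error)
        # 'queued'/'running': report progress and retry, like the
        # "progress is 0%" records above.
        print(f"progress is {info.progress or 0}%")
        time.sleep(poll_interval)
```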
[ 2318.523166] env[61962]: DEBUG nova.compute.claims [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2318.523361] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2318.523585] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2318.670278] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b528c87-f6b9-4543-bcc0-a94d52464436 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.678936] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d598de99-1a07-417d-bf7e-badbedfbf63d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.710839] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71dd0e40-386b-4dd2-8f92-c7b37a553422 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.719229] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941688ac-d7d2-4376-aa42-a43a8e101f3a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.733591] env[61962]: DEBUG nova.compute.provider_tree [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2318.744740] env[61962]: DEBUG nova.scheduler.client.report [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2318.760017] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 
tempest-DeleteServersTestJSON-1634339650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.236s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2318.760601] env[61962]: ERROR nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2318.760601] env[61962]: Faults: ['InvalidArgument'] [ 2318.760601] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Traceback (most recent call last): [ 2318.760601] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2318.760601] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] self.driver.spawn(context, instance, image_meta, [ 2318.760601] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2318.760601] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2318.760601] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2318.760601] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] self._fetch_image_if_missing(context, vi) [ 2318.760601] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2318.760601] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] image_cache(vi, tmp_image_ds_loc) [ 2318.760601] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] vm_util.copy_virtual_disk( [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] session._wait_for_task(vmdk_copy_task) [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] return self.wait_for_task(task_ref) [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] return evt.wait() [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: 
c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] result = hub.switch() [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] return self.greenlet.switch() [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2318.761033] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] self.f(*self.args, **self.kw) [ 2318.761491] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2318.761491] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] raise exceptions.translate_fault(task_info.error) [ 2318.761491] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2318.761491] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Faults: ['InvalidArgument'] [ 2318.761491] env[61962]: ERROR nova.compute.manager [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] [ 2318.761491] env[61962]: DEBUG nova.compute.utils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2318.762949] env[61962]: DEBUG nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Build of instance c2e03a3a-a3ac-4661-90f1-1e895a71b289 was re-scheduled: A specified parameter was not correct: fileType [ 2318.762949] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2318.763361] env[61962]: DEBUG nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2318.763539] env[61962]: DEBUG nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2318.763714] env[61962]: DEBUG nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2318.763885] env[61962]: DEBUG nova.network.neutron [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2319.406394] env[61962]: DEBUG nova.network.neutron [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2319.430593] env[61962]: INFO nova.compute.manager [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: c2e03a3a-a3ac-4661-90f1-1e895a71b289] Took 0.67 seconds to deallocate network for instance. [ 2319.605530] env[61962]: INFO nova.scheduler.client.report [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Deleted allocations for instance c2e03a3a-a3ac-4661-90f1-1e895a71b289 [ 2319.642750] env[61962]: DEBUG oslo_concurrency.lockutils [None req-421224d2-c9e0-4f53-b6ec-296507e9118a tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Lock "c2e03a3a-a3ac-4661-90f1-1e895a71b289" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 189.149s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2321.815294] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquiring lock "12306bfe-5154-4508-bf54-bbe744caee05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2321.815710] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Lock "12306bfe-5154-4508-bf54-bbe744caee05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2321.828361] env[61962]: DEBUG nova.compute.manager [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2321.878455] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2321.878703] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2321.880131] env[61962]: INFO nova.compute.claims [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2321.962625] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825412a7-1205-4d47-9126-0cf02102aa20 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.970817] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625d8913-97b1-45e4-846e-fa18863ba535 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.000692] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900a485c-da21-48aa-83d1-f09732921f13 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.008039] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de8e507-9a24-47d2-870d-b562395fdece {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.021977] env[61962]: DEBUG nova.compute.provider_tree [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2322.031936] env[61962]: DEBUG nova.scheduler.client.report [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2322.045151] env[61962]: DEBUG oslo_concurrency.lockutils [None 
req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.166s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2322.045609] env[61962]: DEBUG nova.compute.manager [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Start building networks asynchronously for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2322.077742] env[61962]: DEBUG nova.compute.utils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2322.079266] env[61962]: DEBUG nova.compute.manager [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2322.079458] env[61962]: DEBUG nova.network.neutron [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2322.090061] env[61962]: DEBUG nova.compute.manager [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2322.160410] env[61962]: DEBUG nova.compute.manager [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2322.165683] env[61962]: DEBUG nova.policy [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8b17c7d1eea48db9fef8d05689059e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd09904dce58b4cee83cebb57e6ad26a4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 2322.191210] env[61962]: DEBUG nova.virt.hardware [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2322.191517] env[61962]: DEBUG nova.virt.hardware [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2322.191735] env[61962]: DEBUG nova.virt.hardware [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2322.191978] env[61962]: DEBUG nova.virt.hardware [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2322.192416] env[61962]: DEBUG nova.virt.hardware [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2322.192596] env[61962]: DEBUG nova.virt.hardware [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2322.192810] env[61962]: DEBUG nova.virt.hardware [None 
req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2322.192973] env[61962]: DEBUG nova.virt.hardware [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2322.193166] env[61962]: DEBUG nova.virt.hardware [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2322.193346] env[61962]: DEBUG nova.virt.hardware [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2322.193528] env[61962]: DEBUG nova.virt.hardware [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2322.194427] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c8e32e-5a0b-4f78-9a06-33775c8ee323 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.203273] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1f64e1-da92-4809-bfd3-780d1747ad72 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.426574] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2322.426764] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2322.493096] env[61962]: DEBUG nova.network.neutron [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Successfully created port: b385c28d-e4ca-410a-9add-da655c381587 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2323.073270] env[61962]: DEBUG nova.compute.manager [req-de34f5ac-aa92-44ff-a32d-e32ddf19b964 req-403b88b2-4a65-4189-b006-9ed2fe251b37 service nova] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Received event network-vif-plugged-b385c28d-e4ca-410a-9add-da655c381587 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2323.073270] env[61962]: DEBUG oslo_concurrency.lockutils [req-de34f5ac-aa92-44ff-a32d-e32ddf19b964 req-403b88b2-4a65-4189-b006-9ed2fe251b37 service nova] Acquiring lock "12306bfe-5154-4508-bf54-bbe744caee05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2323.073270] env[61962]: DEBUG oslo_concurrency.lockutils [req-de34f5ac-aa92-44ff-a32d-e32ddf19b964 req-403b88b2-4a65-4189-b006-9ed2fe251b37 service nova] Lock "12306bfe-5154-4508-bf54-bbe744caee05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2323.073270] env[61962]: DEBUG oslo_concurrency.lockutils [req-de34f5ac-aa92-44ff-a32d-e32ddf19b964 req-403b88b2-4a65-4189-b006-9ed2fe251b37 service nova] Lock "12306bfe-5154-4508-bf54-bbe744caee05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2323.073641] env[61962]: DEBUG nova.compute.manager [req-de34f5ac-aa92-44ff-a32d-e32ddf19b964 req-403b88b2-4a65-4189-b006-9ed2fe251b37 service nova] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] No waiting events found dispatching network-vif-plugged-b385c28d-e4ca-410a-9add-da655c381587 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2323.073966] env[61962]: WARNING nova.compute.manager [req-de34f5ac-aa92-44ff-a32d-e32ddf19b964 req-403b88b2-4a65-4189-b006-9ed2fe251b37 service nova] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Received unexpected event network-vif-plugged-b385c28d-e4ca-410a-9add-da655c381587 for instance with vm_state building and task_state spawning. 
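The lockutils records and the WARNING above trace Nova's external-event handshake: Neutron delivers network-vif-plugged-<port>, and the compute manager, under the per-instance "<uuid>-events" lock, looks for a registered waiter to dispatch it to; none exists yet because the instance is still spawning, so the event is logged and discarded. A minimal sketch of that latch pattern follows, using simplified illustrative names (`InstanceEvents`, `pop_event`, `on_external_event` here are assumptions for the sketch, not Nova's exact classes or signatures).

```python
import threading


class InstanceEvents:
    """Illustrative per-instance event latch, loosely modeled on the
    behavior shown in the log (names and types simplified)."""

    def __init__(self):
        self._lock = threading.Lock()  # plays the role of the "<uuid>-events" lock
        self._waiters = {}             # uuid -> {event_name: threading.Event}

    def prepare_for_event(self, uuid, name):
        """Register interest before kicking off the operation."""
        event = threading.Event()
        with self._lock:
            self._waiters.setdefault(uuid, {})[name] = event
        return event

    def pop_event(self, uuid, name):
        """Remove and return the waiter for an arriving event, or None
        (the "No waiting events found dispatching ..." case above)."""
        with self._lock:
            return self._waiters.get(uuid, {}).pop(name, None)


def on_external_event(events, uuid, name):
    waiter = events.pop_event(uuid, name)
    if waiter is None:
        # Matches the WARNING record above: the event arrived before
        # anything was waiting for it, so it is logged and dropped.
        print(f"WARNING: unexpected event {name} for instance {uuid}")
    else:
        waiter.set()  # unblock the thread waiting on the VIF plug
```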
[ 2323.185129] env[61962]: DEBUG nova.network.neutron [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Successfully updated port: b385c28d-e4ca-410a-9add-da655c381587 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2323.195327] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquiring lock "refresh_cache-12306bfe-5154-4508-bf54-bbe744caee05" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2323.195490] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquired lock "refresh_cache-12306bfe-5154-4508-bf54-bbe744caee05" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2323.196385] env[61962]: DEBUG nova.network.neutron [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2323.242644] env[61962]: DEBUG nova.network.neutron [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2323.435590] env[61962]: DEBUG nova.network.neutron [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Updating instance_info_cache with network_info: [{"id": "b385c28d-e4ca-410a-9add-da655c381587", "address": "fa:16:3e:e8:56:1f", "network": {"id": "f502c11b-57b9-4f6c-a30a-2f36f8e77ee0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1357472377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09904dce58b4cee83cebb57e6ad26a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb385c28d-e4", "ovs_interfaceid": "b385c28d-e4ca-410a-9add-da655c381587", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2323.450546] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Releasing lock "refresh_cache-12306bfe-5154-4508-bf54-bbe744caee05" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2323.450934] env[61962]: DEBUG nova.compute.manager [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Instance network_info: |[{"id": "b385c28d-e4ca-410a-9add-da655c381587", "address": "fa:16:3e:e8:56:1f", "network": {"id": "f502c11b-57b9-4f6c-a30a-2f36f8e77ee0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1357472377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09904dce58b4cee83cebb57e6ad26a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb385c28d-e4", "ovs_interfaceid": "b385c28d-e4ca-410a-9add-da655c381587", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2323.451367] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:56:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a88b707-352e-4be7-b1d6-ad6074b40ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b385c28d-e4ca-410a-9add-da655c381587', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2323.459074] env[61962]: DEBUG oslo.service.loopingcall [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2323.459400] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2323.459626] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc36e502-6c9d-4281-b15c-015ad6992cb6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.481332] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2323.481332] env[61962]: value = "task-4892142" [ 2323.481332] env[61962]: _type = "Task" [ 2323.481332] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2323.490267] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892142, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2323.991448] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892142, 'name': CreateVM_Task, 'duration_secs': 0.280895} completed successfully. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2323.991666] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2323.992331] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2323.992505] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2323.992849] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2323.993113] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-618161e5-cbe1-4044-8f3a-defe44ccf7f8 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.997434] env[61962]: DEBUG oslo_vmware.api [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Waiting for the task: (returnval){ [ 2323.997434] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52e06710-62f2-6ba2-cbaa-5656959ac2a6" [ 2323.997434] env[61962]: _type = "Task" [ 2323.997434] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.005774] env[61962]: DEBUG oslo_vmware.api [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52e06710-62f2-6ba2-cbaa-5656959ac2a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.508540] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2324.508928] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2324.508997] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2325.106651] env[61962]: DEBUG nova.compute.manager [req-cc4f905c-79bf-4a64-952a-0443d74860f7 req-f9bc4fbb-9b95-4ee7-b82f-ac0034e26782 service nova] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Received event network-changed-b385c28d-e4ca-410a-9add-da655c381587 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2325.106849] env[61962]: DEBUG nova.compute.manager [req-cc4f905c-79bf-4a64-952a-0443d74860f7 req-f9bc4fbb-9b95-4ee7-b82f-ac0034e26782 service nova] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Refreshing instance network info cache due to event network-changed-b385c28d-e4ca-410a-9add-da655c381587. 
{{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2325.107082] env[61962]: DEBUG oslo_concurrency.lockutils [req-cc4f905c-79bf-4a64-952a-0443d74860f7 req-f9bc4fbb-9b95-4ee7-b82f-ac0034e26782 service nova] Acquiring lock "refresh_cache-12306bfe-5154-4508-bf54-bbe744caee05" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2325.107226] env[61962]: DEBUG oslo_concurrency.lockutils [req-cc4f905c-79bf-4a64-952a-0443d74860f7 req-f9bc4fbb-9b95-4ee7-b82f-ac0034e26782 service nova] Acquired lock "refresh_cache-12306bfe-5154-4508-bf54-bbe744caee05" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2325.107386] env[61962]: DEBUG nova.network.neutron [req-cc4f905c-79bf-4a64-952a-0443d74860f7 req-f9bc4fbb-9b95-4ee7-b82f-ac0034e26782 service nova] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Refreshing network info cache for port b385c28d-e4ca-410a-9add-da655c381587 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2325.153620] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.380242] env[61962]: DEBUG nova.network.neutron [req-cc4f905c-79bf-4a64-952a-0443d74860f7 req-f9bc4fbb-9b95-4ee7-b82f-ac0034e26782 service nova] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Updated VIF entry in instance network info cache for port b385c28d-e4ca-410a-9add-da655c381587. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2325.380609] env[61962]: DEBUG nova.network.neutron [req-cc4f905c-79bf-4a64-952a-0443d74860f7 req-f9bc4fbb-9b95-4ee7-b82f-ac0034e26782 service nova] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Updating instance_info_cache with network_info: [{"id": "b385c28d-e4ca-410a-9add-da655c381587", "address": "fa:16:3e:e8:56:1f", "network": {"id": "f502c11b-57b9-4f6c-a30a-2f36f8e77ee0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1357472377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09904dce58b4cee83cebb57e6ad26a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb385c28d-e4", "ovs_interfaceid": "b385c28d-e4ca-410a-9add-da655c381587", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2325.390306] env[61962]: DEBUG oslo_concurrency.lockutils [req-cc4f905c-79bf-4a64-952a-0443d74860f7 req-f9bc4fbb-9b95-4ee7-b82f-ac0034e26782 service nova] Releasing lock "refresh_cache-12306bfe-5154-4508-bf54-bbe744caee05" {{(pid=61962) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2327.158052] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2328.159073] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2328.159503] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2328.159503] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2328.174703] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2328.174869] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2328.174997] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. 
{{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2328.175494] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2330.481141] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2330.481589] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Getting list of instances from cluster (obj){ [ 2330.481589] env[61962]: value = "domain-c8" [ 2330.481589] env[61962]: _type = "ClusterComputeResource" [ 2330.481589] env[61962]: } {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2330.482674] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f7b171-77a5-49f0-9250-088b317d0e09 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.493982] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Got total of 2 instances {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2331.158440] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2331.158649] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 2331.173156] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] There are 1 instances to clean {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 2331.173403] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 2a520bab-558e-49e8-9816-c1acc00663f6] Instance has had 0 of 5 cleanup attempts {{(pid=61962) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11320}} [ 2332.158210] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2332.170952] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2332.171258] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2332.171392] env[61962]: DEBUG 
nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Cleaning up deleted instances with incomplete migration {{(pid=61962) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 2333.167779] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2334.158671] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2337.158321] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.464431] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.476536] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Getting list of instances from cluster (obj){ [ 2345.476536] env[61962]: value = "domain-c8" [ 2345.476536] env[61962]: _type = "ClusterComputeResource" [ 2345.476536] env[61962]: } {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2345.477796] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3eaae7e-4198-4d48-ab3e-20dcde44ce6c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.489141] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Got total of 2 instances {{(pid=61962) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2345.489331] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid cf00153e-76fc-48c4-ac9f-5fa9b616d7de {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2345.489504] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Triggering sync for uuid 12306bfe-5154-4508-bf54-bbe744caee05 {{(pid=61962) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2345.489803] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "cf00153e-76fc-48c4-ac9f-5fa9b616d7de" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2345.490044] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "12306bfe-5154-4508-bf54-bbe744caee05" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61962) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2367.106629] env[61962]: WARNING oslo_vmware.rw_handles [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2367.106629] env[61962]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2367.106629] env[61962]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2367.106629] env[61962]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2367.106629] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2367.106629] env[61962]: ERROR oslo_vmware.rw_handles response.begin() [ 2367.106629] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2367.106629] env[61962]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2367.106629] env[61962]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2367.106629] env[61962]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2367.106629] env[61962]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2367.106629] env[61962]: ERROR oslo_vmware.rw_handles [ 2367.107591] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Downloaded image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to vmware_temp/42e79439-d7f6-46be-b246-fee389d109d8/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2367.109027] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Caching image {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2367.109279] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Copying Virtual Disk [datastore2] vmware_temp/42e79439-d7f6-46be-b246-fee389d109d8/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk to [datastore2] vmware_temp/42e79439-d7f6-46be-b246-fee389d109d8/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk {{(pid=61962) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2367.109565] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3aafb23-a7b5-4578-a5ec-8b965b4afef6 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.118352] env[61962]: DEBUG oslo_vmware.api [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 2367.118352] env[61962]: value = "task-4892143" [ 
2367.118352] env[61962]: _type = "Task" [ 2367.118352] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2367.126768] env[61962]: DEBUG oslo_vmware.api [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': task-4892143, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2367.629800] env[61962]: DEBUG oslo_vmware.exceptions [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Fault InvalidArgument not matched. {{(pid=61962) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2367.630155] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2367.630651] env[61962]: ERROR nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2367.630651] env[61962]: Faults: ['InvalidArgument'] [ 2367.630651] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Traceback (most recent call last): [ 2367.630651] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2367.630651] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] yield resources [ 2367.630651] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2367.630651] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] self.driver.spawn(context, instance, image_meta, [ 2367.630651] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2367.630651] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2367.630651] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2367.630651] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] self._fetch_image_if_missing(context, vi) [ 2367.630651] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2367.630651] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] image_cache(vi, tmp_image_ds_loc) [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: 
cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] vm_util.copy_virtual_disk( [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] session._wait_for_task(vmdk_copy_task) [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] return self.wait_for_task(task_ref) [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] return evt.wait() [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] result = hub.switch() [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] return self.greenlet.switch() [ 2367.631358] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2367.632843] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] self.f(*self.args, **self.kw) [ 2367.632843] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2367.632843] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] raise exceptions.translate_fault(task_info.error) [ 2367.632843] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2367.632843] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Faults: ['InvalidArgument'] [ 2367.632843] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] [ 2367.632843] env[61962]: INFO nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Terminating instance [ 2367.632843] env[61962]: DEBUG oslo_concurrency.lockutils [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2367.632843] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2367.633686] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85d012f1-d97d-478e-a029-7ac2a7d6c2c0 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.635656] env[61962]: DEBUG nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Start destroying the instance on the hypervisor. {{(pid=61962) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2367.635852] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Destroying instance {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2367.636623] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee80e0b9-040e-42bc-9d9a-0f666bf14259 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.644442] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Unregistering the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2367.644442] env[61962]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-732bfc31-2faf-4725-97aa-485366753770 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.646764] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2367.646945] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61962) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2367.647971] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2a30f75-1949-4a60-894a-5cf1a36b0c9d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.653753] env[61962]: DEBUG oslo_vmware.api [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Waiting for the task: (returnval){ [ 2367.653753] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52e795d3-51ff-9a27-b51d-4533c5a51bdb" [ 2367.653753] env[61962]: _type = "Task" [ 2367.653753] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2367.661626] env[61962]: DEBUG oslo_vmware.api [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52e795d3-51ff-9a27-b51d-4533c5a51bdb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2367.717600] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Unregistered the VM {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2367.717938] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Deleting contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2367.718163] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Deleting the datastore file [datastore2] cf00153e-76fc-48c4-ac9f-5fa9b616d7de {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2367.718501] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f2cbcfb-9a0b-4732-94ab-b5defcb9b43f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.725590] env[61962]: DEBUG oslo_vmware.api [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){ [ 2367.725590] env[61962]: value = "task-4892145" [ 2367.725590] env[61962]: _type = "Task" [ 2367.725590] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2367.733945] env[61962]: DEBUG oslo_vmware.api [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': task-4892145, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2368.163842] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Preparing fetch location {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2368.164352] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Creating directory with path [datastore2] vmware_temp/0fcbcff8-9092-41d2-aeaa-6448c6525840/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2368.164352] env[61962]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de0eed50-b26b-49ae-bd4c-95c2b8f9cf71 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.209560] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Created directory with path [datastore2] vmware_temp/0fcbcff8-9092-41d2-aeaa-6448c6525840/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2368.209762] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Fetch image to [datastore2] vmware_temp/0fcbcff8-9092-41d2-aeaa-6448c6525840/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2368.209921] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Downloading image file data f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to [datastore2] vmware_temp/0fcbcff8-9092-41d2-aeaa-6448c6525840/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk on the data store datastore2 {{(pid=61962) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2368.210836] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9d466c-326f-4b69-ad1c-6c5bd4811e2f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.218506] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8f3be4-65bf-4a1f-8606-a8821478768b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.231014] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f7319a-596c-4b25-9d98-c8a1355cced7 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.239741] env[61962]: DEBUG oslo_vmware.api [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 
tempest-ServersTestJSON-536512511-project-member] Task: {'id': task-4892145, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091415} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2368.265844] env[61962]: DEBUG nova.virt.vmwareapi.ds_util [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Deleted the datastore file {{(pid=61962) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2368.266788] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Deleted contents of the VM from datastore datastore2 {{(pid=61962) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2368.267027] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Instance destroyed {{(pid=61962) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2368.267215] env[61962]: INFO nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Took 0.63 seconds to destroy the instance on the hypervisor. [ 2368.270043] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495ab13c-b475-4d80-9898-ce489c5e0d2c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.272437] env[61962]: DEBUG nova.compute.claims [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Aborting claim: {{(pid=61962) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2368.272617] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2368.272852] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2368.279857] env[61962]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8e5f5533-6464-4302-929b-a82a81b91426 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.304754] env[61962]: DEBUG nova.virt.vmwareapi.images [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Downloading image file data 
f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe to the data store datastore2 {{(pid=61962) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2368.362595] env[61962]: DEBUG oslo_vmware.rw_handles [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0fcbcff8-9092-41d2-aeaa-6448c6525840/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61962) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2368.419544] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc01470-b415-401d-9887-9b8b1681c039 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.427144] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a0f051-c77a-4d2a-b615-6dbab624eb13 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.430673] env[61962]: DEBUG oslo_vmware.rw_handles [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Completed reading data from the image iterator. {{(pid=61962) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2368.430858] env[61962]: DEBUG oslo_vmware.rw_handles [None req-305e3195-a6bd-4c79-9176-7003c0da3a2f tempest-DeleteServersTestJSON-1634339650 tempest-DeleteServersTestJSON-1634339650-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0fcbcff8-9092-41d2-aeaa-6448c6525840/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61962) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2368.461284] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d8b8a4-b97e-4f72-93cf-f48717205e6e {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.469588] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2888c275-7d73-4b98-8348-0cef8ae60ae4 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.483829] env[61962]: DEBUG nova.compute.provider_tree [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2368.495993] env[61962]: DEBUG nova.scheduler.client.report [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2368.510631] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.238s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2368.511332] env[61962]: ERROR nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2368.511332] env[61962]: Faults: ['InvalidArgument'] [ 2368.511332] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Traceback (most recent call last): [ 2368.511332] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2368.511332] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] self.driver.spawn(context, instance, image_meta, [ 2368.511332] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2368.511332] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2368.511332] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2368.511332] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] self._fetch_image_if_missing(context, vi) [ 2368.511332] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2368.511332] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] image_cache(vi, tmp_image_ds_loc) [ 2368.511332] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] vm_util.copy_virtual_disk( [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] session._wait_for_task(vmdk_copy_task) [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] return self.wait_for_task(task_ref) [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] return evt.wait() [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] result = hub.switch() [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] return self.greenlet.switch() [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2368.512605] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] self.f(*self.args, **self.kw) [ 2368.512994] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2368.512994] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] raise exceptions.translate_fault(task_info.error) [ 2368.512994] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2368.512994] env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Faults: ['InvalidArgument'] [ 2368.512994] 
env[61962]: ERROR nova.compute.manager [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] [ 2368.512994] env[61962]: DEBUG nova.compute.utils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] VimFaultException {{(pid=61962) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2368.513802] env[61962]: DEBUG nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Build of instance cf00153e-76fc-48c4-ac9f-5fa9b616d7de was re-scheduled: A specified parameter was not correct: fileType [ 2368.513802] env[61962]: Faults: ['InvalidArgument'] {{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2368.514191] env[61962]: DEBUG nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Unplugging VIFs for instance {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2368.514374] env[61962]: DEBUG nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61962) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2368.514617] env[61962]: DEBUG nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Deallocating network for instance {{(pid=61962) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2368.514728] env[61962]: DEBUG nova.network.neutron [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] deallocate_for_instance() {{(pid=61962) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2368.817428] env[61962]: DEBUG nova.network.neutron [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Updating instance_info_cache with network_info: [] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2368.830497] env[61962]: INFO nova.compute.manager [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] Took 0.32 seconds to deallocate network for instance. 
[ 2368.952884] env[61962]: INFO nova.scheduler.client.report [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Deleted allocations for instance cf00153e-76fc-48c4-ac9f-5fa9b616d7de [ 2368.987234] env[61962]: DEBUG oslo_concurrency.lockutils [None req-978a4e84-7dbb-45a2-8510-6ac35af156f6 tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "cf00153e-76fc-48c4-ac9f-5fa9b616d7de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.626s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2368.987504] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "cf00153e-76fc-48c4-ac9f-5fa9b616d7de" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 23.498s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2368.987731] env[61962]: INFO nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: cf00153e-76fc-48c4-ac9f-5fa9b616d7de] During sync_power_state the instance has a pending task (spawning). Skip. [ 2368.987923] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "cf00153e-76fc-48c4-ac9f-5fa9b616d7de" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.585735] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "c78c4445-e980-47c4-a3cf-8db9b370d099" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.586107] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "c78c4445-e980-47c4-a3cf-8db9b370d099" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.600658] env[61962]: DEBUG nova.compute.manager [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Starting instance... 
{{(pid=61962) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2370.654980] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.655248] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.657292] env[61962]: INFO nova.compute.claims [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2370.731941] env[61962]: DEBUG nova.scheduler.client.report [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Refreshing inventories for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2370.759037] env[61962]: DEBUG nova.scheduler.client.report [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Updating ProviderTree inventory for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2370.759338] env[61962]: DEBUG nova.compute.provider_tree [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Updating inventory in ProviderTree for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2370.785831] env[61962]: DEBUG nova.scheduler.client.report [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Refreshing aggregate associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, aggregates: None {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2370.831140] env[61962]: DEBUG nova.scheduler.client.report [None 
req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Refreshing trait associations for resource provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61962) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2370.874487] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5597a207-03ac-4337-a0da-d3479120c80a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.882672] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f59be8-81eb-43b5-a7e0-1385a45ea4c3 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.914190] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef2f15d-a763-4a62-b290-7afa2d7a4437 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.922607] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38da393-c88e-4d53-a5f8-6a1d82a242a2 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.936784] env[61962]: DEBUG nova.compute.provider_tree [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2370.946086] env[61962]: DEBUG nova.scheduler.client.report [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2370.959791] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.304s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.960297] env[61962]: DEBUG nova.compute.manager [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Start building networks asynchronously for instance. 
{{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2370.992120] env[61962]: DEBUG nova.compute.utils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Using /dev/sd instead of None {{(pid=61962) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2370.993623] env[61962]: DEBUG nova.compute.manager [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Allocating IP information in the background. {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2370.993797] env[61962]: DEBUG nova.network.neutron [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] allocate_for_instance() {{(pid=61962) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2371.004721] env[61962]: DEBUG nova.compute.manager [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Start building block device mappings for instance. {{(pid=61962) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2371.062162] env[61962]: DEBUG nova.policy [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0be80fa75c4c49d899a3dac926c1ed37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc34536d51ef4a22aef0b274ba5aa28e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61962) authorize /opt/stack/nova/nova/policy.py:203}} [ 2371.073951] env[61962]: DEBUG nova.compute.manager [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Start spawning the instance on the hypervisor. 
{{(pid=61962) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2371.102572] env[61962]: DEBUG nova.virt.hardware [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T12:09:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T12:09:07Z,direct_url=,disk_format='vmdk',id=f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eef556fb5c4f49b889491ae31a496de5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T12:09:08Z,virtual_size=,visibility=), allow threads: False {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2371.102863] env[61962]: DEBUG nova.virt.hardware [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Flavor limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2371.103071] env[61962]: DEBUG nova.virt.hardware [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Image limits 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2371.103312] env[61962]: DEBUG nova.virt.hardware [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Flavor pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2371.103498] env[61962]: DEBUG nova.virt.hardware [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Image pref 0:0:0 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2371.103651] env[61962]: DEBUG nova.virt.hardware [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61962) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2371.103871] env[61962]: DEBUG nova.virt.hardware [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2371.104820] env[61962]: DEBUG nova.virt.hardware [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2371.105092] env[61962]: DEBUG nova.virt.hardware [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 
tempest-ServersTestJSON-536512511-project-member] Got 1 possible topologies {{(pid=61962) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2371.105825] env[61962]: DEBUG nova.virt.hardware [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2371.106208] env[61962]: DEBUG nova.virt.hardware [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61962) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2371.108133] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d367273-4218-48be-b546-2dfbd9c1e748 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.119218] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66703402-237a-49ee-9b54-2e8b935bca9a {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.645192] env[61962]: DEBUG nova.network.neutron [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Successfully created port: b1c76b44-1608-4235-b1e0-789cf1426a65 {{(pid=61962) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2372.186088] env[61962]: DEBUG nova.compute.manager [req-e2368afd-bb71-4b9e-b451-7724fb295b84 req-0607f84d-0c73-4844-9a27-71cc428dab75 service nova] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Received event network-vif-plugged-b1c76b44-1608-4235-b1e0-789cf1426a65 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2372.186319] env[61962]: DEBUG oslo_concurrency.lockutils [req-e2368afd-bb71-4b9e-b451-7724fb295b84 req-0607f84d-0c73-4844-9a27-71cc428dab75 service nova] Acquiring lock "c78c4445-e980-47c4-a3cf-8db9b370d099-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2372.186524] env[61962]: DEBUG oslo_concurrency.lockutils [req-e2368afd-bb71-4b9e-b451-7724fb295b84 req-0607f84d-0c73-4844-9a27-71cc428dab75 service nova] Lock "c78c4445-e980-47c4-a3cf-8db9b370d099-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2372.186689] env[61962]: DEBUG oslo_concurrency.lockutils [req-e2368afd-bb71-4b9e-b451-7724fb295b84 req-0607f84d-0c73-4844-9a27-71cc428dab75 service nova] Lock "c78c4445-e980-47c4-a3cf-8db9b370d099-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2372.186853] env[61962]: DEBUG nova.compute.manager [req-e2368afd-bb71-4b9e-b451-7724fb295b84 req-0607f84d-0c73-4844-9a27-71cc428dab75 service nova] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] No 
waiting events found dispatching network-vif-plugged-b1c76b44-1608-4235-b1e0-789cf1426a65 {{(pid=61962) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2372.187347] env[61962]: WARNING nova.compute.manager [req-e2368afd-bb71-4b9e-b451-7724fb295b84 req-0607f84d-0c73-4844-9a27-71cc428dab75 service nova] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Received unexpected event network-vif-plugged-b1c76b44-1608-4235-b1e0-789cf1426a65 for instance with vm_state building and task_state spawning. [ 2372.272323] env[61962]: DEBUG nova.network.neutron [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Successfully updated port: b1c76b44-1608-4235-b1e0-789cf1426a65 {{(pid=61962) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2372.284658] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "refresh_cache-c78c4445-e980-47c4-a3cf-8db9b370d099" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2372.284818] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired lock "refresh_cache-c78c4445-e980-47c4-a3cf-8db9b370d099" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2372.284950] env[61962]: DEBUG nova.network.neutron [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Building network info cache for instance {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2372.328668] env[61962]: DEBUG nova.network.neutron [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Instance cache missing network info. 
{{(pid=61962) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 2372.559163] env[61962]: DEBUG nova.network.neutron [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Updating instance_info_cache with network_info: [{"id": "b1c76b44-1608-4235-b1e0-789cf1426a65", "address": "fa:16:3e:b3:46:21", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1c76b44-16", "ovs_interfaceid": "b1c76b44-1608-4235-b1e0-789cf1426a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2372.572477] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Releasing lock "refresh_cache-c78c4445-e980-47c4-a3cf-8db9b370d099" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2372.572870] env[61962]: DEBUG nova.compute.manager [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Instance network_info: |[{"id": "b1c76b44-1608-4235-b1e0-789cf1426a65", "address": "fa:16:3e:b3:46:21", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1c76b44-16", "ovs_interfaceid": "b1c76b44-1608-4235-b1e0-789cf1426a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61962) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 2372.573642] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:46:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4d3f69a-b086-4c3b-b976-5a848b63dfc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1c76b44-1608-4235-b1e0-789cf1426a65', 'vif_model': 'vmxnet3'}] {{(pid=61962) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2372.581581] env[61962]: DEBUG oslo.service.loopingcall [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61962) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2372.582524] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Creating VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 2372.582828] env[61962]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96523848-fb6f-4a7c-bbd1-afd4ac652c6b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2372.603373] env[61962]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2372.603373] env[61962]: value = "task-4892146"
[ 2372.603373] env[61962]: _type = "Task"
[ 2372.603373] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2372.613360] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892146, 'name': CreateVM_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2373.114964] env[61962]: DEBUG oslo_vmware.api [-] Task: {'id': task-4892146, 'name': CreateVM_Task, 'duration_secs': 0.357155} completed successfully. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2373.115409] env[61962]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Created VM on the ESX host {{(pid=61962) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 2373.116272] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2373.116540] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2373.117043] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2373.117553] env[61962]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc1a0da4-3257-44fb-be7b-2452716aa77d {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2373.123613] env[61962]: DEBUG oslo_vmware.api [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Waiting for the task: (returnval){
[ 2373.123613] env[61962]: value = "session[5210918f-aadc-9b29-42fa-0929c2e42627]52e4175c-b1bd-97b2-45ce-1189ff539bfe"
[ 2373.123613] env[61962]: _type = "Task"
[ 2373.123613] env[61962]: } to complete. {{(pid=61962) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2373.132932] env[61962]: DEBUG oslo_vmware.api [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Task: {'id': session[5210918f-aadc-9b29-42fa-0929c2e42627]52e4175c-b1bd-97b2-45ce-1189ff539bfe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61962) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2373.634793] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2373.635016] env[61962]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Processing image f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe {{(pid=61962) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2373.635248] env[61962]: DEBUG oslo_concurrency.lockutils [None req-b8b2c342-a551-47c8-b1d9-5b30d70e320f tempest-ServersTestJSON-536512511 tempest-ServersTestJSON-536512511-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe/f684bb17-3ab2-4bd5-a19e-4c36c57d0ebe.vmdk" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2374.217669] env[61962]: DEBUG nova.compute.manager [req-c0e005b4-3fbc-4200-bb15-eb8ade042000 req-20cb70f5-3fcf-43ce-9b53-da6ce4ad16b2 service nova] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Received event network-changed-b1c76b44-1608-4235-b1e0-789cf1426a65 {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 2374.217952] env[61962]: DEBUG nova.compute.manager [req-c0e005b4-3fbc-4200-bb15-eb8ade042000 req-20cb70f5-3fcf-43ce-9b53-da6ce4ad16b2 service nova] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Refreshing instance network info cache due to event network-changed-b1c76b44-1608-4235-b1e0-789cf1426a65. {{(pid=61962) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}}
[ 2374.218178] env[61962]: DEBUG oslo_concurrency.lockutils [req-c0e005b4-3fbc-4200-bb15-eb8ade042000 req-20cb70f5-3fcf-43ce-9b53-da6ce4ad16b2 service nova] Acquiring lock "refresh_cache-c78c4445-e980-47c4-a3cf-8db9b370d099" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2374.218224] env[61962]: DEBUG oslo_concurrency.lockutils [req-c0e005b4-3fbc-4200-bb15-eb8ade042000 req-20cb70f5-3fcf-43ce-9b53-da6ce4ad16b2 service nova] Acquired lock "refresh_cache-c78c4445-e980-47c4-a3cf-8db9b370d099" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2374.218380] env[61962]: DEBUG nova.network.neutron [req-c0e005b4-3fbc-4200-bb15-eb8ade042000 req-20cb70f5-3fcf-43ce-9b53-da6ce4ad16b2 service nova] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Refreshing network info cache for port b1c76b44-1608-4235-b1e0-789cf1426a65 {{(pid=61962) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 2374.522861] env[61962]: DEBUG nova.network.neutron [req-c0e005b4-3fbc-4200-bb15-eb8ade042000 req-20cb70f5-3fcf-43ce-9b53-da6ce4ad16b2 service nova] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Updated VIF entry in instance network info cache for port b1c76b44-1608-4235-b1e0-789cf1426a65. {{(pid=61962) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 2374.523243] env[61962]: DEBUG nova.network.neutron [req-c0e005b4-3fbc-4200-bb15-eb8ade042000 req-20cb70f5-3fcf-43ce-9b53-da6ce4ad16b2 service nova] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Updating instance_info_cache with network_info: [{"id": "b1c76b44-1608-4235-b1e0-789cf1426a65", "address": "fa:16:3e:b3:46:21", "network": {"id": "d8431398-17df-4a56-85cb-1034c177202a", "bridge": "br-int", "label": "tempest-ServersTestJSON-1395925254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc34536d51ef4a22aef0b274ba5aa28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1c76b44-16", "ovs_interfaceid": "b1c76b44-1608-4235-b1e0-789cf1426a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61962) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2374.532369] env[61962]: DEBUG oslo_concurrency.lockutils [req-c0e005b4-3fbc-4200-bb15-eb8ade042000 req-20cb70f5-3fcf-43ce-9b53-da6ce4ad16b2 service nova] Releasing lock "refresh_cache-c78c4445-e980-47c4-a3cf-8db9b370d099" {{(pid=61962) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2378.158528] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2378.173039] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2378.173039] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2378.173039] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2378.173039] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61962) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2378.173938] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32830fbc-b2af-4602-8ce8-78e60d59050b {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2378.183966] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf52fe62-2749-4010-9e4c-e4ee119469a5 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2378.200454] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6bb7bb-e6df-49c9-a7f6-b7bdcb4969af {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2378.209408] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f979a018-0181-4ae6-a9e7-402c57d4dbdd {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2378.243083] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180567MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61962) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2378.243480] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2378.243796] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2378.306024] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance 12306bfe-5154-4508-bf54-bbe744caee05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 2378.306024] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Instance c78c4445-e980-47c4-a3cf-8db9b370d099 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61962) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 2378.306024] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2378.306024] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=100GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] stats={'failed_builds': '6', 'num_instances': '2', 'num_vm_building': '2', 'num_task_spawning': '2', 'num_os_type_None': '2', 'num_proj_d09904dce58b4cee83cebb57e6ad26a4': '1', 'io_workload': '2', 'num_proj_cc34536d51ef4a22aef0b274ba5aa28e': '1'} {{(pid=61962) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2378.355316] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da330c82-399d-4cc1-9d31-67fdbdf7fc5c {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2378.363497] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c65975-45da-4493-9172-d547cf8e5a81 {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2378.397403] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d710f5bc-ade8-45a0-9894-db1d2a21a7ba {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2378.407124] env[61962]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb800aa0-6c25-4641-a961-afd07b7df97f {{(pid=61962) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2378.423545] env[61962]: DEBUG nova.compute.provider_tree [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed in ProviderTree for provider: 5a20dc57-fddd-49ec-bab5-953a03eebaa1 {{(pid=61962) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2378.434144] env[61962]: DEBUG nova.scheduler.client.report [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Inventory has not changed for provider 5a20dc57-fddd-49ec-bab5-953a03eebaa1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61962) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 2378.450928] env[61962]: DEBUG nova.compute.resource_tracker [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61962) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2378.450928] env[61962]: DEBUG oslo_concurrency.lockutils [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.207s {{(pid=61962) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2383.451512] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2383.451954] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61962) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}}
[ 2387.154188] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2388.158577] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2389.159637] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2389.159987] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Starting heal instance info cache {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}}
[ 2389.159987] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Rebuilding the list of instances to heal {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}}
[ 2389.174891] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: 12306bfe-5154-4508-bf54-bbe744caee05] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 2389.175090] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] [instance: c78c4445-e980-47c4-a3cf-8db9b370d099] Skipping network cache update for instance because it is Building. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 2389.175199] env[61962]: DEBUG nova.compute.manager [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Didn't find any instances for network info cache update. {{(pid=61962) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}}
[ 2389.175706] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2393.158057] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2394.159569] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2395.159219] env[61962]: DEBUG oslo_service.periodic_task [None req-ba337279-4320-40ca-b616-7e590432f203 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61962) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
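Note on the "Instance VIF info" line above: the vmwareapi driver derives it from the Neutron network_info element logged just before it. For this NSX-backed OVS port, the nsx-logical-switch-id from the port's binding details becomes an OpaqueNetwork reference, paired with the port's MAC and UUID and a vmxnet3 adapter model. A hedged sketch of that mapping; the field names come from the two log entries, but the helper itself is illustrative, not Nova code:

```python
def vif_info_from_network_info(vif):
    """Illustrative re-derivation of the 'Instance VIF info' entry from
    the network_info dict logged above; not the actual Nova helper."""
    details = vif['details']
    return {
        'network_name': vif['network']['bridge'],   # 'br-int'
        'mac_address': vif['address'],              # 'fa:16:3e:b3:46:21'
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],    # the Neutron port UUID
        'vif_model': 'vmxnet3',   # adapter model reported for this guest
    }
```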
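The CreateVM_Task sequence ("Invoking Folder.CreateVM_Task", then "_poll_task ... progress is 0%" until "completed successfully") is the standard oslo.vmware pattern: invoke a vCenter task method through the session, then block on wait_for_task, which polls the task until it reaches a terminal state. A minimal sketch, assuming a reachable vCenter; the hostname, credentials, and the folder/pool/config-spec arguments are placeholders, not values from this deployment:

```python
from oslo_vmware import api

def create_vm(session, folder_ref, resource_pool_ref, config_spec):
    # invoke_api issues the SOAP call (the "Invoking Folder.CreateVM_Task"
    # line) and returns a task moref such as "task-4892146".
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=resource_pool_ref)
    # wait_for_task drives the "_poll_task ... progress is 0%" /
    # "completed successfully" loop and returns the finished task info.
    task_info = session.wait_for_task(task_ref)
    return task_info.result  # moref of the newly created VM

# Placeholder endpoint and credentials.
session = api.VMwareAPISession('vc.example.org', 'user', 'password',
                               api_retry_count=10, task_poll_interval=0.5)
```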
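The Acquiring/Acquired/Releasing lock lines come from oslo.concurrency's lockutils, in two forms that match the line numbers in the trace: the lock() context manager logs the acquire/release triplets (lockutils.py:310/313/331) seen for "refresh_cache-<uuid>" and the datastore image-cache paths, while the synchronized() decorator logs waited/held timings (lockutils.py:402/407/421) seen for "compute_resources". A small sketch; the lock names mirror the log, the function bodies are stand-ins:

```python
from oslo_concurrency import lockutils

# Context-manager form: emits the Acquiring/Acquired/Releasing lines.
instance_uuid = 'c78c4445-e980-47c4-a3cf-8db9b370d099'
with lockutils.lock('refresh_cache-%s' % instance_uuid):
    pass  # refresh and persist the instance's network info cache

# Decorator form: emits the acquired/waited and released/held lines.
@lockutils.synchronized('compute_resources')
def update_available_resource():
    pass  # audit hypervisor resources while holding the lock
```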
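The "Final resource view" figures are plain bookkeeping over the two placement allocations listed just above it, plus the reserved memory from the reported inventory: used_ram = 512 + 2 x 128 = 768 MB, used_disk = 2 x 1 = 2 GB, used_vcpus = 2 of 48 (with a 4.0 VCPU allocation_ratio in the inventory). A toy recomputation using only values copied from this section:

```python
# Allocations from the _remove_deleted_instances_allocations lines above.
allocations = [
    {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1},  # 12306bfe-5154-...
    {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1},  # c78c4445-e980-...
]
reserved_ram_mb = 512  # MEMORY_MB 'reserved' in the reported inventory

used_ram_mb = reserved_ram_mb + sum(a['MEMORY_MB'] for a in allocations)
used_disk_gb = sum(a['DISK_GB'] for a in allocations)
used_vcpus = sum(a['VCPU'] for a in allocations)

# Matches the logged view: used_ram=768MB used_disk=2GB used_vcpus=2.
assert (used_ram_mb, used_disk_gb, used_vcpus) == (768, 2, 2)
```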